1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
/// An openable project item backed by a project entry (e.g. a buffer).
pub trait Item: Entity {
    /// Returns the id of the project entry backing this item, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
58
/// A collection of worktrees along with the state needed to edit and
/// collaborate on them: open buffers, running language servers, and the
/// RPC client/collaborator bookkeeping used when the project is shared.
pub struct Project {
    /// Handles to all worktrees; weak entries may have been dropped.
    worktrees: Vec<WorktreeHandle>,
    /// The currently focused project entry, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    /// Fully started language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    /// In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    /// Per-server status, keyed by the server's numeric id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    /// Whether this project is local (host) or remote (guest); see
    /// `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    /// Notifies listeners whenever any buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    /// Buffer ids that have been sent to each peer while sharing.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    /// In-flight buffer loads, so concurrent opens of the same path share
    /// one task; the watch resolves once the load completes.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    /// In-flight local worktree creations, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    /// All open buffers keyed by their remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    /// Versioned snapshots per buffer, used when replying to LSP requests.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    /// Random per-instance value; presumably used to disambiguate instances — see usages elsewhere in the file.
    nonce: u128,
}
90
/// The project's handle on an open buffer. Strong while the buffer must be
/// kept alive (shared or remote projects), weak otherwise, or a queue of
/// operations received before the buffer finished loading.
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    /// Buffered operations to apply once the buffer is registered.
    Loading(Vec<Operation>),
}
96
/// A strong or weak handle on a worktree. Worktrees are held strongly while
/// shared or visible, and weakly otherwise so they can be dropped.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
101
/// Whether this project is hosted locally or joined remotely as a guest.
enum ProjectClientState {
    /// This instance is the host: it owns the worktrees and may share them.
    Local {
        is_shared: bool,
        /// The server-assigned project id, present while registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Re-registers the project whenever the client reconnects.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    /// This instance is a guest viewing a host's shared project.
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Marks the project unshared if the connection drops.
        _detect_unshare_task: Task<Option<()>>,
    },
}
116
/// Another participant collaborating on this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
123
/// Events emitted by a `Project` for observers (e.g. UI components).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
135
/// Progress/status information for a single running language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    /// In-flight work items, keyed by the LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}
142
/// A single LSP progress report (message, optional percentage, and when it
/// was last updated).
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // `Instant` is not serializable, and this is only useful locally.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
150
/// A path to a file or directory within a specific worktree of the project.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
156
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
162
/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
168
/// A highlighted range produced by an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
174
/// A workspace symbol reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    /// The worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    /// Digest of the symbol's identifying fields; presumably used to verify
    /// symbols passed between peers — confirm against the signing code.
    pub signature: [u8; 32],
}
187
/// A set of buffer transactions produced by one logical operation (e.g. a
/// workspace edit), keyed by the buffer each transaction applies to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
/// A project-wide unique identifier for an entry within a worktree.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id with the maximum possible value.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id from `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id from its wire representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Returns the wire representation of this id.
    pub fn to_proto(&self) -> u64 {
        let ProjectEntryId(id) = *self;
        id as u64
    }

    /// Returns the raw numeric value of this id.
    pub fn to_usize(&self) -> usize {
        let ProjectEntryId(id) = *self;
        id
    }
}
246
247impl Project {
    /// Registers all of `Project`'s RPC message and request handlers on the
    /// client, routing incoming collaboration messages to project models.
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
280
    /// Creates a new local (host-side) project with no worktrees.
    ///
    /// Spawns a background task that registers the project with the server
    /// whenever the client connects and unregisters it on disconnect.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Keep the project's server-side registration in sync with the
            // client's connection status.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
343
    /// Joins an existing shared project by id, as a guest.
    ///
    /// Authenticates and connects the client, requests to join the project,
    /// constructs remote worktrees and language-server statuses from the
    /// server's response, and loads the current collaborators before
    /// returning the project handle.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The server assigns this guest's replica id for the session.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection status and mark the project as
                    // unshared as soon as the connection is interrupted.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the users for all collaborators before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
455
    /// Test helper: builds a local project over `fs` with a worktree for
    /// each of `root_paths`, waiting for every initial scan to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        // All HTTP requests fail with 404 in tests.
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
479
480 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
481 self.opened_buffers
482 .get(&remote_id)
483 .and_then(|buffer| buffer.upgrade(cx))
484 }
485
    /// Returns the language registry used by this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
489
    /// Test helper: asserts internal invariants.
    ///
    /// For local projects, every worktree must have a unique absolute path.
    /// For remote projects, no open buffer may have deferred (unapplied)
    /// operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
523
524 #[cfg(any(test, feature = "test-support"))]
525 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
526 let path = path.into();
527 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
528 self.opened_buffers.iter().any(|(_, buffer)| {
529 if let Some(buffer) = buffer.upgrade(cx) {
530 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
531 if file.worktree == worktree && file.path() == &path.path {
532 return true;
533 }
534 }
535 }
536 false
537 })
538 } else {
539 false
540 }
541 }
542
    /// Returns the filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
546
    /// Removes this local project's registration with the server: stops
    /// sharing, unregisters every live worktree, clears the remote id, and
    /// drops all RPC subscriptions. Called on disconnect and before
    /// re-registering.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
563
    /// Registers this local project with the server, obtaining a fresh
    /// remote id, then registers every live worktree under that id.
    ///
    /// Any previous registration is torn down first via `unregister`.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // Fail if any worktree registration fails.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
596
597 pub fn remote_id(&self) -> Option<u64> {
598 match &self.client_state {
599 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
600 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
601 }
602 }
603
    /// Returns a future resolving to this project's remote id, waiting for
    /// registration to complete if the local project doesn't have one yet.
    /// For remote projects the id is known and resolves immediately.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: wait until the registration task publishes an id.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
626
627 pub fn replica_id(&self) -> ReplicaId {
628 match &self.client_state {
629 ProjectClientState::Local { .. } => 0,
630 ProjectClientState::Remote { replica_id, .. } => *replica_id,
631 }
632 }
633
    /// Returns the current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
637
638 pub fn worktrees<'a>(
639 &'a self,
640 cx: &'a AppContext,
641 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
642 self.worktrees
643 .iter()
644 .filter_map(move |worktree| worktree.upgrade(cx))
645 }
646
647 pub fn visible_worktrees<'a>(
648 &'a self,
649 cx: &'a AppContext,
650 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
651 self.worktrees.iter().filter_map(|worktree| {
652 worktree.upgrade(cx).and_then(|worktree| {
653 if worktree.read(cx).is_visible() {
654 Some(worktree)
655 } else {
656 None
657 }
658 })
659 })
660 }
661
662 pub fn worktree_for_id(
663 &self,
664 id: WorktreeId,
665 cx: &AppContext,
666 ) -> Option<ModelHandle<Worktree>> {
667 self.worktrees(cx)
668 .find(|worktree| worktree.read(cx).id() == id)
669 }
670
671 pub fn worktree_for_entry(
672 &self,
673 entry_id: ProjectEntryId,
674 cx: &AppContext,
675 ) -> Option<ModelHandle<Worktree>> {
676 self.worktrees(cx)
677 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
678 }
679
680 pub fn worktree_id_for_entry(
681 &self,
682 entry_id: ProjectEntryId,
683 cx: &AppContext,
684 ) -> Option<WorktreeId> {
685 self.worktree_for_entry(entry_id, cx)
686 .map(|worktree| worktree.read(cx).id())
687 }
688
689 pub fn can_share(&self, cx: &AppContext) -> bool {
690 self.is_local() && self.visible_worktrees(cx).next().is_some()
691 }
692
693 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
694 let rpc = self.client.clone();
695 cx.spawn(|this, mut cx| async move {
696 let project_id = this.update(&mut cx, |this, cx| {
697 if let ProjectClientState::Local {
698 is_shared,
699 remote_id_rx,
700 ..
701 } = &mut this.client_state
702 {
703 *is_shared = true;
704
705 for open_buffer in this.opened_buffers.values_mut() {
706 match open_buffer {
707 OpenBuffer::Strong(_) => {}
708 OpenBuffer::Weak(buffer) => {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 *open_buffer = OpenBuffer::Strong(buffer);
711 }
712 }
713 OpenBuffer::Loading(_) => unreachable!(),
714 }
715 }
716
717 for worktree_handle in this.worktrees.iter_mut() {
718 match worktree_handle {
719 WorktreeHandle::Strong(_) => {}
720 WorktreeHandle::Weak(worktree) => {
721 if let Some(worktree) = worktree.upgrade(cx) {
722 *worktree_handle = WorktreeHandle::Strong(worktree);
723 }
724 }
725 }
726 }
727
728 remote_id_rx
729 .borrow()
730 .ok_or_else(|| anyhow!("no project id"))
731 } else {
732 Err(anyhow!("can't share a remote project"))
733 }
734 })?;
735
736 rpc.request(proto::ShareProject { project_id }).await?;
737
738 let mut tasks = Vec::new();
739 this.update(&mut cx, |this, cx| {
740 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
741 worktree.update(cx, |worktree, cx| {
742 let worktree = worktree.as_local_mut().unwrap();
743 tasks.push(worktree.share(project_id, cx));
744 });
745 }
746 });
747 for task in tasks {
748 task.await?;
749 }
750 this.update(&mut cx, |_, cx| cx.notify());
751 Ok(())
752 })
753 }
754
755 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
756 let rpc = self.client.clone();
757
758 if let ProjectClientState::Local {
759 is_shared,
760 remote_id_rx,
761 ..
762 } = &mut self.client_state
763 {
764 if !*is_shared {
765 return;
766 }
767
768 *is_shared = false;
769 self.collaborators.clear();
770 self.shared_buffers.clear();
771 for worktree_handle in self.worktrees.iter_mut() {
772 if let WorktreeHandle::Strong(worktree) = worktree_handle {
773 let is_visible = worktree.update(cx, |worktree, _| {
774 worktree.as_local_mut().unwrap().unshare();
775 worktree.is_visible()
776 });
777 if !is_visible {
778 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
779 }
780 }
781 }
782
783 for open_buffer in self.opened_buffers.values_mut() {
784 match open_buffer {
785 OpenBuffer::Strong(buffer) => {
786 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
787 }
788 _ => {}
789 }
790 }
791
792 if let Some(project_id) = *remote_id_rx.borrow() {
793 rpc.send(proto::UnshareProject { project_id }).log_err();
794 }
795
796 cx.notify();
797 } else {
798 log::error!("attempted to unshare a remote project");
799 }
800 }
801
    /// Marks a remote project as no longer shared (e.g. after the host
    /// stopped sharing or the connection dropped) and clears collaborators.
    /// A no-op for local projects.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
813
814 pub fn is_read_only(&self) -> bool {
815 match &self.client_state {
816 ProjectClientState::Local { .. } => false,
817 ProjectClientState::Remote {
818 sharing_has_stopped,
819 ..
820 } => *sharing_has_stopped,
821 }
822 }
823
824 pub fn is_local(&self) -> bool {
825 match &self.client_state {
826 ProjectClientState::Local { .. } => true,
827 ProjectClientState::Remote { .. } => false,
828 }
829 }
830
    /// Returns true if this instance is a guest of a shared project.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
834
    /// Creates and registers a new buffer with the given text and language
    /// (defaulting to plain text). Only supported on local projects.
    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
852
    /// Opens the buffer at the given project path and returns it together
    /// with the id of its project entry. Fails if the opened buffer has no
    /// backing project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
869
870 pub fn open_local_buffer(
871 &mut self,
872 abs_path: impl AsRef<Path>,
873 cx: &mut ModelContext<Self>,
874 ) -> Task<Result<ModelHandle<Buffer>>> {
875 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
876 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
877 } else {
878 Task::ready(Err(anyhow!("no such path")))
879 }
880 }
881
    /// Opens (or returns an already-open) buffer for the given project path.
    ///
    /// Concurrent opens of the same path are deduplicated: the first caller
    /// starts the load and later callers wait on a shared watch for its
    /// result. Local worktrees load from disk; remote worktrees request the
    /// buffer from the host.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the shared load task publishes a result.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
942
    /// Loads a buffer for `path` from the local worktree's filesystem and
    /// registers it with the project.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
959
    /// Requests the buffer for `path` from the host over RPC and
    /// deserializes it into a local model.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Only called for remote projects, which always have a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
984
    /// Opens a buffer for a file URI referenced by a language server (e.g. a
    /// go-to-definition target outside any current worktree).
    ///
    /// If no worktree contains the path, a new invisible worktree is created
    /// for it and associated with the given language server.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // The path lies outside all worktrees: add an invisible one
                // rooted at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1023
    /// Opens a buffer by its remote id, returning the already-open buffer if
    /// there is one. For remote projects the buffer is requested from the
    /// host; for local projects an unknown id is an error.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1049
    /// Saves `buffer` to a new absolute path ("save as").
    ///
    /// Unregisters the buffer from its current language server (if it had a
    /// file), finds or creates a worktree for the new path, saves the buffer
    /// there, then reassigns its language and re-registers it with the
    /// appropriate language server.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                // The new path may imply a different language and server.
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1081
1082 pub fn get_open_buffer(
1083 &mut self,
1084 path: &ProjectPath,
1085 cx: &mut ModelContext<Self>,
1086 ) -> Option<ModelHandle<Buffer>> {
1087 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1088 self.opened_buffers.values().find_map(|buffer| {
1089 let buffer = buffer.upgrade(cx)?;
1090 let file = File::from_dyn(buffer.read(cx).file())?;
1091 if file.worktree == worktree && file.path() == &path.path {
1092 Some(buffer)
1093 } else {
1094 None
1095 }
1096 })
1097 }
1098
1099 fn register_buffer(
1100 &mut self,
1101 buffer: &ModelHandle<Buffer>,
1102 cx: &mut ModelContext<Self>,
1103 ) -> Result<()> {
1104 let remote_id = buffer.read(cx).remote_id();
1105 let open_buffer = if self.is_remote() || self.is_shared() {
1106 OpenBuffer::Strong(buffer.clone())
1107 } else {
1108 OpenBuffer::Weak(buffer.downgrade())
1109 };
1110
1111 match self.opened_buffers.insert(remote_id, open_buffer) {
1112 None => {}
1113 Some(OpenBuffer::Loading(operations)) => {
1114 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1115 }
1116 Some(OpenBuffer::Weak(existing_handle)) => {
1117 if existing_handle.upgrade(cx).is_some() {
1118 Err(anyhow!(
1119 "already registered buffer with remote id {}",
1120 remote_id
1121 ))?
1122 }
1123 }
1124 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1125 "already registered buffer with remote id {}",
1126 remote_id
1127 ))?,
1128 }
1129 cx.subscribe(buffer, |this, buffer, event, cx| {
1130 this.on_buffer_event(buffer, event, cx);
1131 })
1132 .detach();
1133
1134 self.assign_language_to_buffer(buffer, cx);
1135 self.register_buffer_with_language_server(buffer, cx);
1136 cx.observe_release(buffer, |this, buffer, cx| {
1137 if let Some(file) = File::from_dyn(buffer.file()) {
1138 if file.is_local() {
1139 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1140 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1141 server
1142 .notify::<lsp::notification::DidCloseTextDocument>(
1143 lsp::DidCloseTextDocumentParams {
1144 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1145 },
1146 )
1147 .log_err();
1148 }
1149 }
1150 }
1151 })
1152 .detach();
1153
1154 Ok(())
1155 }
1156
1157 fn register_buffer_with_language_server(
1158 &mut self,
1159 buffer_handle: &ModelHandle<Buffer>,
1160 cx: &mut ModelContext<Self>,
1161 ) {
1162 let buffer = buffer_handle.read(cx);
1163 let buffer_id = buffer.remote_id();
1164 if let Some(file) = File::from_dyn(buffer.file()) {
1165 if file.is_local() {
1166 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1167 let initial_snapshot = buffer.text_snapshot();
1168
1169 let mut language_server = None;
1170 let mut language_id = None;
1171 if let Some(language) = buffer.language() {
1172 let worktree_id = file.worktree_id(cx);
1173 if let Some(adapter) = language.lsp_adapter() {
1174 language_id = adapter.id_for_language(language.name().as_ref());
1175 language_server = self
1176 .language_servers
1177 .get(&(worktree_id, adapter.name()))
1178 .cloned();
1179 }
1180 }
1181
1182 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1183 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1184 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1185 .log_err();
1186 }
1187 }
1188
1189 if let Some((_, server)) = language_server {
1190 server
1191 .notify::<lsp::notification::DidOpenTextDocument>(
1192 lsp::DidOpenTextDocumentParams {
1193 text_document: lsp::TextDocumentItem::new(
1194 uri,
1195 language_id.unwrap_or_default(),
1196 0,
1197 initial_snapshot.text(),
1198 ),
1199 }
1200 .clone(),
1201 )
1202 .log_err();
1203 buffer_handle.update(cx, |buffer, cx| {
1204 buffer.set_completion_triggers(
1205 server
1206 .capabilities()
1207 .completion_provider
1208 .as_ref()
1209 .and_then(|provider| provider.trigger_characters.clone())
1210 .unwrap_or(Vec::new()),
1211 cx,
1212 )
1213 });
1214 self.buffer_snapshots
1215 .insert(buffer_id, vec![(0, initial_snapshot)]);
1216 }
1217 }
1218 }
1219 }
1220
1221 fn unregister_buffer_from_language_server(
1222 &mut self,
1223 buffer: &ModelHandle<Buffer>,
1224 old_path: PathBuf,
1225 cx: &mut ModelContext<Self>,
1226 ) {
1227 buffer.update(cx, |buffer, cx| {
1228 buffer.update_diagnostics(Default::default(), cx);
1229 self.buffer_snapshots.remove(&buffer.remote_id());
1230 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1231 language_server
1232 .notify::<lsp::notification::DidCloseTextDocument>(
1233 lsp::DidCloseTextDocumentParams {
1234 text_document: lsp::TextDocumentIdentifier::new(
1235 lsp::Url::from_file_path(old_path).unwrap(),
1236 ),
1237 },
1238 )
1239 .log_err();
1240 }
1241 });
1242 }
1243
    /// Handles events emitted by an open buffer:
    ///
    /// * `Operation` — forwards the CRDT operation to collaborators, if shared.
    /// * `Edited` — sends an incremental `didChange` to the buffer's server.
    /// * `Saved` — sends `didSave` to every server in the buffer's worktree.
    ///
    /// Returns `None` as soon as any required state is missing (remote id,
    /// file, language server, snapshot history); callers ignore the value.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                // Fire-and-forget: failures are only logged.
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Convert edits since the last-synced snapshot into LSP content
                // changes. Each change's start uses post-edit coordinates, and
                // its end is start plus the *old* range's length — matching
                // LSP's rule that changes are applied sequentially to the
                // evolving document.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot for this version so future edits are
                // diffed against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Saves go to every server for the worktree, not only the
                // buffer's own server.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1330
1331 fn language_servers_for_worktree(
1332 &self,
1333 worktree_id: WorktreeId,
1334 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1335 self.language_servers.iter().filter_map(
1336 move |((language_server_worktree_id, _), server)| {
1337 if *language_server_worktree_id == worktree_id {
1338 Some(server)
1339 } else {
1340 None
1341 }
1342 },
1343 )
1344 }
1345
1346 fn assign_language_to_buffer(
1347 &mut self,
1348 buffer: &ModelHandle<Buffer>,
1349 cx: &mut ModelContext<Self>,
1350 ) -> Option<()> {
1351 // If the buffer has a language, set it and start the language server if we haven't already.
1352 let full_path = buffer.read(cx).file()?.full_path(cx);
1353 let language = self.languages.select_language(&full_path)?;
1354 buffer.update(cx, |buffer, cx| {
1355 buffer.set_language(Some(language.clone()), cx);
1356 });
1357
1358 let file = File::from_dyn(buffer.read(cx).file())?;
1359 let worktree = file.worktree.read(cx).as_local()?;
1360 let worktree_id = worktree.id();
1361 let worktree_abs_path = worktree.abs_path().clone();
1362 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1363
1364 None
1365 }
1366
    /// Ensures a language server is running for `language` in `worktree_id`,
    /// starting one in the background if it has not been started yet.
    ///
    /// The `started_language_servers` entry keyed by `(worktree_id, adapter
    /// name)` makes this idempotent. Once the server binary is running and
    /// initialized, this wires up its notification/request handlers, records
    /// its status, pushes the current configuration, tells collaborators about
    /// it, and replays a `didOpen` for every matching already-open buffer.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        // Languages without an LSP adapter have no server to start.
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    // Binary startup and LSP initialization can both fail;
                    // failures are logged and the task simply ends.
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer workspace/configuration requests from the stored
                    // language-server settings (per-section when requested).
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits to our buffers.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Track $/progress notifications for status reporting.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Record the server and an empty status entry for it.
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings to the freshly started server.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Announce the new server to collaborators, if shared.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                // Skip buffers from other worktrees or languages
                                // served by a different adapter.
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                // NOTE(review): the `?`s below abort this whole
                                // closure — skipping any remaining buffers and
                                // the cx.notify() at the bottom — when a file
                                // isn't local or a notify fails. Confirm that
                                // is intended rather than a `continue`.
                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    // The initialized server becomes the entry's stored value.
                    Some(language_server)
                })
            });
    }
1579
    /// Restarts the language servers backing the given buffers.
    ///
    /// Buffers are first deduplicated into their (worktree, abs path, full
    /// path) lookup keys so each affected server is restarted at most once.
    /// Buffers without a file or not in a local worktree are skipped. The
    /// `Option` return is only shortcut control flow; callers ignore it.
    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            // NOTE(review): if `select_language` returns None for one path,
            // the `?` aborts the loop and any remaining restarts are skipped —
            // confirm this short-circuit is intended.
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }
1603
1604 fn restart_language_server(
1605 &mut self,
1606 worktree_id: WorktreeId,
1607 worktree_path: Arc<Path>,
1608 language: Arc<Language>,
1609 cx: &mut ModelContext<Self>,
1610 ) {
1611 let adapter = if let Some(adapter) = language.lsp_adapter() {
1612 adapter
1613 } else {
1614 return;
1615 };
1616 let key = (worktree_id, adapter.name());
1617 let server_to_shutdown = self.language_servers.remove(&key);
1618 self.started_language_servers.remove(&key);
1619 server_to_shutdown
1620 .as_ref()
1621 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1622 cx.spawn_weak(|this, mut cx| async move {
1623 if let Some(this) = this.upgrade(&cx) {
1624 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1625 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1626 shutdown_task.await;
1627 }
1628 }
1629
1630 this.update(&mut cx, |this, cx| {
1631 this.start_language_server(worktree_id, worktree_path, language, cx);
1632 });
1633 }
1634 })
1635 .detach();
1636 }
1637
1638 fn on_lsp_diagnostics_published(
1639 &mut self,
1640 server_id: usize,
1641 mut params: lsp::PublishDiagnosticsParams,
1642 adapter: &Arc<dyn LspAdapter>,
1643 disk_based_diagnostics_progress_token: Option<&str>,
1644 cx: &mut ModelContext<Self>,
1645 ) {
1646 adapter.process_diagnostics(&mut params);
1647 if disk_based_diagnostics_progress_token.is_none() {
1648 self.disk_based_diagnostics_started(cx);
1649 self.broadcast_language_server_update(
1650 server_id,
1651 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1652 proto::LspDiskBasedDiagnosticsUpdating {},
1653 ),
1654 );
1655 }
1656 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1657 .log_err();
1658 if disk_based_diagnostics_progress_token.is_none() {
1659 self.disk_based_diagnostics_finished(cx);
1660 self.broadcast_language_server_update(
1661 server_id,
1662 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1663 proto::LspDiskBasedDiagnosticsUpdated {},
1664 ),
1665 );
1666 }
1667 }
1668
    /// Routes LSP `$/progress` notifications.
    ///
    /// Progress reported under the adapter's disk-based-diagnostics token
    /// drives the project-wide "diagnostics updating/updated" state, counted
    /// so that nested Begin/End pairs only fire events at the edges. All other
    /// tokens are tracked as per-server pending work for the UI and mirrored
    /// to collaborators.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are supported; numeric ones are logged and dropped.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Ignore progress from servers we aren't tracking.
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only the outermost Begin flips the project-wide state.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Reports under the diagnostics token are intentionally ignored;
                // only Begin/End matter for that state.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // NOTE(review): assumes Begin/End arrive balanced — an End
                    // without a matching Begin would underflow this counter.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
1763
1764 fn on_lsp_work_start(
1765 &mut self,
1766 language_server_id: usize,
1767 token: String,
1768 cx: &mut ModelContext<Self>,
1769 ) {
1770 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1771 status.pending_work.insert(
1772 token,
1773 LanguageServerProgress {
1774 message: None,
1775 percentage: None,
1776 last_update_at: Instant::now(),
1777 },
1778 );
1779 cx.notify();
1780 }
1781 }
1782
1783 fn on_lsp_work_progress(
1784 &mut self,
1785 language_server_id: usize,
1786 token: String,
1787 progress: LanguageServerProgress,
1788 cx: &mut ModelContext<Self>,
1789 ) {
1790 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1791 status.pending_work.insert(token, progress);
1792 cx.notify();
1793 }
1794 }
1795
1796 fn on_lsp_work_end(
1797 &mut self,
1798 language_server_id: usize,
1799 token: String,
1800 cx: &mut ModelContext<Self>,
1801 ) {
1802 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1803 status.pending_work.remove(&token);
1804 cx.notify();
1805 }
1806 }
1807
1808 async fn on_lsp_workspace_edit(
1809 this: WeakModelHandle<Self>,
1810 params: lsp::ApplyWorkspaceEditParams,
1811 server_id: usize,
1812 adapter: Arc<dyn LspAdapter>,
1813 language_server: Arc<LanguageServer>,
1814 mut cx: AsyncAppContext,
1815 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1816 let this = this
1817 .upgrade(&cx)
1818 .ok_or_else(|| anyhow!("project project closed"))?;
1819 let transaction = Self::deserialize_workspace_edit(
1820 this.clone(),
1821 params.edit,
1822 true,
1823 adapter.clone(),
1824 language_server.clone(),
1825 &mut cx,
1826 )
1827 .await
1828 .log_err();
1829 this.update(&mut cx, |this, _| {
1830 if let Some(transaction) = transaction {
1831 this.last_workspace_edits_by_language_server
1832 .insert(server_id, transaction);
1833 }
1834 });
1835 Ok(lsp::ApplyWorkspaceEditResponse {
1836 applied: true,
1837 failed_change: None,
1838 failure_reason: None,
1839 })
1840 }
1841
1842 fn broadcast_language_server_update(
1843 &self,
1844 language_server_id: usize,
1845 event: proto::update_language_server::Variant,
1846 ) {
1847 if let Some(project_id) = self.remote_id() {
1848 self.client
1849 .send(proto::UpdateLanguageServer {
1850 project_id,
1851 language_server_id: language_server_id as u64,
1852 variant: Some(event),
1853 })
1854 .log_err();
1855 }
1856 }
1857
1858 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1859 for (_, server) in self.language_servers.values() {
1860 server
1861 .notify::<lsp::notification::DidChangeConfiguration>(
1862 lsp::DidChangeConfigurationParams {
1863 settings: settings.clone(),
1864 },
1865 )
1866 .ok();
1867 }
1868 *self.language_server_settings.lock() = settings;
1869 }
1870
    /// Iterates over the status (name, pending work, diagnostic-update count)
    /// of every tracked language server.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1876
1877 pub fn update_diagnostics(
1878 &mut self,
1879 params: lsp::PublishDiagnosticsParams,
1880 disk_based_sources: &[&str],
1881 cx: &mut ModelContext<Self>,
1882 ) -> Result<()> {
1883 let abs_path = params
1884 .uri
1885 .to_file_path()
1886 .map_err(|_| anyhow!("URI is not a file"))?;
1887 let mut next_group_id = 0;
1888 let mut diagnostics = Vec::default();
1889 let mut primary_diagnostic_group_ids = HashMap::default();
1890 let mut sources_by_group_id = HashMap::default();
1891 let mut supporting_diagnostics = HashMap::default();
1892 for diagnostic in ¶ms.diagnostics {
1893 let source = diagnostic.source.as_ref();
1894 let code = diagnostic.code.as_ref().map(|code| match code {
1895 lsp::NumberOrString::Number(code) => code.to_string(),
1896 lsp::NumberOrString::String(code) => code.clone(),
1897 });
1898 let range = range_from_lsp(diagnostic.range);
1899 let is_supporting = diagnostic
1900 .related_information
1901 .as_ref()
1902 .map_or(false, |infos| {
1903 infos.iter().any(|info| {
1904 primary_diagnostic_group_ids.contains_key(&(
1905 source,
1906 code.clone(),
1907 range_from_lsp(info.location.range),
1908 ))
1909 })
1910 });
1911
1912 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1913 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1914 });
1915
1916 if is_supporting {
1917 supporting_diagnostics.insert(
1918 (source, code.clone(), range),
1919 (diagnostic.severity, is_unnecessary),
1920 );
1921 } else {
1922 let group_id = post_inc(&mut next_group_id);
1923 let is_disk_based = source.map_or(false, |source| {
1924 disk_based_sources.contains(&source.as_str())
1925 });
1926
1927 sources_by_group_id.insert(group_id, source);
1928 primary_diagnostic_group_ids
1929 .insert((source, code.clone(), range.clone()), group_id);
1930
1931 diagnostics.push(DiagnosticEntry {
1932 range,
1933 diagnostic: Diagnostic {
1934 code: code.clone(),
1935 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1936 message: diagnostic.message.clone(),
1937 group_id,
1938 is_primary: true,
1939 is_valid: true,
1940 is_disk_based,
1941 is_unnecessary,
1942 },
1943 });
1944 if let Some(infos) = &diagnostic.related_information {
1945 for info in infos {
1946 if info.location.uri == params.uri && !info.message.is_empty() {
1947 let range = range_from_lsp(info.location.range);
1948 diagnostics.push(DiagnosticEntry {
1949 range,
1950 diagnostic: Diagnostic {
1951 code: code.clone(),
1952 severity: DiagnosticSeverity::INFORMATION,
1953 message: info.message.clone(),
1954 group_id,
1955 is_primary: false,
1956 is_valid: true,
1957 is_disk_based,
1958 is_unnecessary: false,
1959 },
1960 });
1961 }
1962 }
1963 }
1964 }
1965 }
1966
1967 for entry in &mut diagnostics {
1968 let diagnostic = &mut entry.diagnostic;
1969 if !diagnostic.is_primary {
1970 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1971 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1972 source,
1973 diagnostic.code.clone(),
1974 entry.range.clone(),
1975 )) {
1976 if let Some(severity) = severity {
1977 diagnostic.severity = severity;
1978 }
1979 diagnostic.is_unnecessary = is_unnecessary;
1980 }
1981 }
1982 }
1983
1984 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1985 Ok(())
1986 }
1987
1988 pub fn update_diagnostic_entries(
1989 &mut self,
1990 abs_path: PathBuf,
1991 version: Option<i32>,
1992 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1993 cx: &mut ModelContext<Project>,
1994 ) -> Result<(), anyhow::Error> {
1995 let (worktree, relative_path) = self
1996 .find_local_worktree(&abs_path, cx)
1997 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1998 if !worktree.read(cx).is_visible() {
1999 return Ok(());
2000 }
2001
2002 let project_path = ProjectPath {
2003 worktree_id: worktree.read(cx).id(),
2004 path: relative_path.into(),
2005 };
2006 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2007 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2008 }
2009
2010 let updated = worktree.update(cx, |worktree, cx| {
2011 worktree
2012 .as_local_mut()
2013 .ok_or_else(|| anyhow!("not a local worktree"))?
2014 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2015 })?;
2016 if updated {
2017 cx.emit(Event::DiagnosticsUpdated(project_path));
2018 }
2019 Ok(())
2020 }
2021
    /// Applies freshly-received diagnostics to a buffer, clipping and
    /// adjusting their ranges so they are valid against the buffer's current
    /// contents.
    ///
    /// `version` identifies the LSP document version the diagnostics were
    /// computed against; the matching stored snapshot is used for clipping.
    ///
    /// # Errors
    /// Fails when no snapshot matching `version` is available.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break order for entries with identical ranges: primaries first,
        // then non-disk-based before disk-based, then severity, then message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending and end descending so enclosing ranges come
        // before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clamp the (possibly stale) positions to valid buffer points.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                // Try extending forward first; if the range is still empty
                // (e.g. at end of line/buffer), extend backward instead.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        // The patch is no longer needed beyond this point.
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2090
    /// Reloads the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty, file-backed buffers are touched. Local buffers reload
    /// directly; remote buffers reload via a request to the host project.
    /// Returns the combined transaction covering every reloaded buffer; when
    /// `push_to_history` is false, each local transaction is removed from its
    /// buffer's undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition dirty buffers into local and remote.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reloads require a remote id (i.e. a live connection).
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                // The host's transaction is replayed into our buffers.
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2154
2155 pub fn format(
2156 &self,
2157 buffers: HashSet<ModelHandle<Buffer>>,
2158 push_to_history: bool,
2159 cx: &mut ModelContext<Project>,
2160 ) -> Task<Result<ProjectTransaction>> {
2161 let mut local_buffers = Vec::new();
2162 let mut remote_buffers = None;
2163 for buffer_handle in buffers {
2164 let buffer = buffer_handle.read(cx);
2165 if let Some(file) = File::from_dyn(buffer.file()) {
2166 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2167 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2168 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2169 }
2170 } else {
2171 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2172 }
2173 } else {
2174 return Task::ready(Ok(Default::default()));
2175 }
2176 }
2177
2178 let remote_buffers = self.remote_id().zip(remote_buffers);
2179 let client = self.client.clone();
2180
2181 cx.spawn(|this, mut cx| async move {
2182 let mut project_transaction = ProjectTransaction::default();
2183
2184 if let Some((project_id, remote_buffers)) = remote_buffers {
2185 let response = client
2186 .request(proto::FormatBuffers {
2187 project_id,
2188 buffer_ids: remote_buffers
2189 .iter()
2190 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2191 .collect(),
2192 })
2193 .await?
2194 .transaction
2195 .ok_or_else(|| anyhow!("missing transaction"))?;
2196 project_transaction = this
2197 .update(&mut cx, |this, cx| {
2198 this.deserialize_project_transaction(response, push_to_history, cx)
2199 })
2200 .await?;
2201 }
2202
2203 for (buffer, buffer_abs_path, language_server) in local_buffers {
2204 let text_document = lsp::TextDocumentIdentifier::new(
2205 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2206 );
2207 let capabilities = &language_server.capabilities();
2208 let tab_size = cx.update(|cx| {
2209 let language_name = buffer.read(cx).language().map(|language| language.name());
2210 cx.global::<Settings>().tab_size(language_name.as_deref())
2211 });
2212 let lsp_edits = if capabilities
2213 .document_formatting_provider
2214 .as_ref()
2215 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2216 {
2217 language_server
2218 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2219 text_document,
2220 options: lsp::FormattingOptions {
2221 tab_size,
2222 insert_spaces: true,
2223 insert_final_newline: Some(true),
2224 ..Default::default()
2225 },
2226 work_done_progress_params: Default::default(),
2227 })
2228 .await?
2229 } else if capabilities
2230 .document_range_formatting_provider
2231 .as_ref()
2232 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2233 {
2234 let buffer_start = lsp::Position::new(0, 0);
2235 let buffer_end =
2236 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2237 language_server
2238 .request::<lsp::request::RangeFormatting>(
2239 lsp::DocumentRangeFormattingParams {
2240 text_document,
2241 range: lsp::Range::new(buffer_start, buffer_end),
2242 options: lsp::FormattingOptions {
2243 tab_size: 4,
2244 insert_spaces: true,
2245 insert_final_newline: Some(true),
2246 ..Default::default()
2247 },
2248 work_done_progress_params: Default::default(),
2249 },
2250 )
2251 .await?
2252 } else {
2253 continue;
2254 };
2255
2256 if let Some(lsp_edits) = lsp_edits {
2257 let edits = this
2258 .update(&mut cx, |this, cx| {
2259 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2260 })
2261 .await?;
2262 buffer.update(&mut cx, |buffer, cx| {
2263 buffer.finalize_last_transaction();
2264 buffer.start_transaction();
2265 for (range, text) in edits {
2266 buffer.edit([(range, text)], cx);
2267 }
2268 if buffer.end_transaction(cx).is_some() {
2269 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2270 if !push_to_history {
2271 buffer.forget_transaction(transaction.id);
2272 }
2273 project_transaction.0.insert(cx.handle(), transaction);
2274 }
2275 });
2276 }
2277 }
2278
2279 Ok(project_transaction)
2280 })
2281 }
2282
2283 pub fn definition<T: ToPointUtf16>(
2284 &self,
2285 buffer: &ModelHandle<Buffer>,
2286 position: T,
2287 cx: &mut ModelContext<Self>,
2288 ) -> Task<Result<Vec<Location>>> {
2289 let position = position.to_point_utf16(buffer.read(cx));
2290 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2291 }
2292
2293 pub fn references<T: ToPointUtf16>(
2294 &self,
2295 buffer: &ModelHandle<Buffer>,
2296 position: T,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<Vec<Location>>> {
2299 let position = position.to_point_utf16(buffer.read(cx));
2300 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2301 }
2302
2303 pub fn document_highlights<T: ToPointUtf16>(
2304 &self,
2305 buffer: &ModelHandle<Buffer>,
2306 position: T,
2307 cx: &mut ModelContext<Self>,
2308 ) -> Task<Result<Vec<DocumentHighlight>>> {
2309 let position = position.to_point_utf16(buffer.read(cx));
2310
2311 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2312 }
2313
    /// Searches all running language servers for workspace symbols matching
    /// `query`.
    ///
    /// Locally, one `workspace/symbol` request is issued per
    /// (worktree, language server) pair and the responses are merged; each
    /// returned symbol is re-homed to the worktree that actually contains its
    /// file when possible. On a remote project, the query is forwarded to the
    /// host via `GetProjectSymbols`. Individual server failures are logged
    /// and treated as empty results rather than failing the whole query.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            // A failed request is logged and yields an empty
                            // response below, so other servers still count.
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer the worktree that actually contains the
                            // symbol's file; fall back to a path relative to
                            // the worktree the server was started in.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and
                        // dropped rather than failing the whole response.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2416
2417 pub fn open_buffer_for_symbol(
2418 &mut self,
2419 symbol: &Symbol,
2420 cx: &mut ModelContext<Self>,
2421 ) -> Task<Result<ModelHandle<Buffer>>> {
2422 if self.is_local() {
2423 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2424 symbol.source_worktree_id,
2425 symbol.language_server_name.clone(),
2426 )) {
2427 server.clone()
2428 } else {
2429 return Task::ready(Err(anyhow!(
2430 "language server for worktree and language not found"
2431 )));
2432 };
2433
2434 let worktree_abs_path = if let Some(worktree_abs_path) = self
2435 .worktree_for_id(symbol.worktree_id, cx)
2436 .and_then(|worktree| worktree.read(cx).as_local())
2437 .map(|local_worktree| local_worktree.abs_path())
2438 {
2439 worktree_abs_path
2440 } else {
2441 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2442 };
2443 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2444 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2445 uri
2446 } else {
2447 return Task::ready(Err(anyhow!("invalid symbol path")));
2448 };
2449
2450 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2451 } else if let Some(project_id) = self.remote_id() {
2452 let request = self.client.request(proto::OpenBufferForSymbol {
2453 project_id,
2454 symbol: Some(serialize_symbol(symbol)),
2455 });
2456 cx.spawn(|this, mut cx| async move {
2457 let response = request.await?;
2458 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2459 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2460 .await
2461 })
2462 } else {
2463 Task::ready(Err(anyhow!("project does not have a remote id")))
2464 }
2465 }
2466
    /// Requests completions at `position` in the given buffer.
    ///
    /// For local buffers, a `textDocument/completion` request is sent to the
    /// buffer's language server and each item's replacement range is either
    /// taken from the server-provided text edit (after validating it against
    /// the buffer) or inferred from the word token around the position.
    /// For remote buffers, the request is forwarded to the host via
    /// `GetCompletions`, waiting for the buffer to catch up to the host's
    /// version before deserializing the results.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers without a file can't be completed against.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Flatten both LSP response shapes into a plain item list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed word-token range, shared by all items
                    // that lack a server-provided text edit.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has seen the host's buffer version
                // before resolving completion anchors against it.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2624
    /// Resolves a completion item and applies any `additionalTextEdits` the
    /// language server returns (e.g. auto-inserted imports).
    ///
    /// Returns the transaction covering those edits, or `None` if the server
    /// provided no additional edits. On a remote project, the work happens on
    /// the host via `ApplyCompletionAdditionalEdits` and the returned
    /// transaction is awaited locally. When `push_to_history` is false, the
    /// transaction is kept out of the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` may lazily fill in the additional
                // edits that weren't present in the original completion.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        // An empty edit set produces no transaction.
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive before reporting
                    // the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2706
2707 pub fn code_actions<T: Clone + ToOffset>(
2708 &self,
2709 buffer_handle: &ModelHandle<Buffer>,
2710 range: Range<T>,
2711 cx: &mut ModelContext<Self>,
2712 ) -> Task<Result<Vec<CodeAction>>> {
2713 let buffer_handle = buffer_handle.clone();
2714 let buffer = buffer_handle.read(cx);
2715 let snapshot = buffer.snapshot();
2716 let relevant_diagnostics = snapshot
2717 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2718 .map(|entry| entry.to_lsp_diagnostic_stub())
2719 .collect();
2720 let buffer_id = buffer.remote_id();
2721 let worktree;
2722 let buffer_abs_path;
2723 if let Some(file) = File::from_dyn(buffer.file()) {
2724 worktree = file.worktree.clone();
2725 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2726 } else {
2727 return Task::ready(Ok(Default::default()));
2728 };
2729 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2730
2731 if worktree.read(cx).as_local().is_some() {
2732 let buffer_abs_path = buffer_abs_path.unwrap();
2733 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2734 {
2735 server.clone()
2736 } else {
2737 return Task::ready(Ok(Default::default()));
2738 };
2739
2740 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2741 cx.foreground().spawn(async move {
2742 if !lang_server.capabilities().code_action_provider.is_some() {
2743 return Ok(Default::default());
2744 }
2745
2746 Ok(lang_server
2747 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2748 text_document: lsp::TextDocumentIdentifier::new(
2749 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2750 ),
2751 range: lsp_range,
2752 work_done_progress_params: Default::default(),
2753 partial_result_params: Default::default(),
2754 context: lsp::CodeActionContext {
2755 diagnostics: relevant_diagnostics,
2756 only: Some(vec![
2757 lsp::CodeActionKind::QUICKFIX,
2758 lsp::CodeActionKind::REFACTOR,
2759 lsp::CodeActionKind::REFACTOR_EXTRACT,
2760 lsp::CodeActionKind::SOURCE,
2761 ]),
2762 },
2763 })
2764 .await?
2765 .unwrap_or_default()
2766 .into_iter()
2767 .filter_map(|entry| {
2768 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2769 Some(CodeAction {
2770 range: range.clone(),
2771 lsp_action,
2772 })
2773 } else {
2774 None
2775 }
2776 })
2777 .collect())
2778 })
2779 } else if let Some(project_id) = self.remote_id() {
2780 let rpc = self.client.clone();
2781 let version = buffer.version();
2782 cx.spawn_weak(|_, mut cx| async move {
2783 let response = rpc
2784 .request(proto::GetCodeActions {
2785 project_id,
2786 buffer_id,
2787 start: Some(language::proto::serialize_anchor(&range.start)),
2788 end: Some(language::proto::serialize_anchor(&range.end)),
2789 version: serialize_version(&version),
2790 })
2791 .await?;
2792
2793 buffer_handle
2794 .update(&mut cx, |buffer, _| {
2795 buffer.wait_for_version(deserialize_version(response.version))
2796 })
2797 .await;
2798
2799 response
2800 .actions
2801 .into_iter()
2802 .map(language::proto::deserialize_code_action)
2803 .collect()
2804 })
2805 } else {
2806 Task::ready(Ok(Default::default()))
2807 }
2808 }
2809
    /// Applies a code action to the project, returning the resulting
    /// [`ProjectTransaction`].
    ///
    /// Locally, the action is first resolved: if it carries LSP `data`, its
    /// embedded range is refreshed and `codeAction/resolve` is requested;
    /// otherwise the actions for its range are re-fetched and matched by
    /// title. The resolved action's workspace edit is then applied, or its
    /// command executed (collecting whatever workspace edits the server sends
    /// back during execution). On a remote project, the action is forwarded
    /// to the host via `ApplyCodeAction`.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // Refresh the stored range (the buffer may have changed
                    // since the action was fetched), then resolve the action.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request actions for this range and
                    // match by title to get an up-to-date action.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any previously recorded edits for this server so
                    // we only report edits produced by this command.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2905
2906 async fn deserialize_workspace_edit(
2907 this: ModelHandle<Self>,
2908 edit: lsp::WorkspaceEdit,
2909 push_to_history: bool,
2910 lsp_adapter: Arc<dyn LspAdapter>,
2911 language_server: Arc<LanguageServer>,
2912 cx: &mut AsyncAppContext,
2913 ) -> Result<ProjectTransaction> {
2914 let fs = this.read_with(cx, |this, _| this.fs.clone());
2915 let mut operations = Vec::new();
2916 if let Some(document_changes) = edit.document_changes {
2917 match document_changes {
2918 lsp::DocumentChanges::Edits(edits) => {
2919 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2920 }
2921 lsp::DocumentChanges::Operations(ops) => operations = ops,
2922 }
2923 } else if let Some(changes) = edit.changes {
2924 operations.extend(changes.into_iter().map(|(uri, edits)| {
2925 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2926 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2927 uri,
2928 version: None,
2929 },
2930 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2931 })
2932 }));
2933 }
2934
2935 let mut project_transaction = ProjectTransaction::default();
2936 for operation in operations {
2937 match operation {
2938 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2939 let abs_path = op
2940 .uri
2941 .to_file_path()
2942 .map_err(|_| anyhow!("can't convert URI to path"))?;
2943
2944 if let Some(parent_path) = abs_path.parent() {
2945 fs.create_dir(parent_path).await?;
2946 }
2947 if abs_path.ends_with("/") {
2948 fs.create_dir(&abs_path).await?;
2949 } else {
2950 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2951 .await?;
2952 }
2953 }
2954 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2955 let source_abs_path = op
2956 .old_uri
2957 .to_file_path()
2958 .map_err(|_| anyhow!("can't convert URI to path"))?;
2959 let target_abs_path = op
2960 .new_uri
2961 .to_file_path()
2962 .map_err(|_| anyhow!("can't convert URI to path"))?;
2963 fs.rename(
2964 &source_abs_path,
2965 &target_abs_path,
2966 op.options.map(Into::into).unwrap_or_default(),
2967 )
2968 .await?;
2969 }
2970 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2971 let abs_path = op
2972 .uri
2973 .to_file_path()
2974 .map_err(|_| anyhow!("can't convert URI to path"))?;
2975 let options = op.options.map(Into::into).unwrap_or_default();
2976 if abs_path.ends_with("/") {
2977 fs.remove_dir(&abs_path, options).await?;
2978 } else {
2979 fs.remove_file(&abs_path, options).await?;
2980 }
2981 }
2982 lsp::DocumentChangeOperation::Edit(op) => {
2983 let buffer_to_edit = this
2984 .update(cx, |this, cx| {
2985 this.open_local_buffer_via_lsp(
2986 op.text_document.uri,
2987 lsp_adapter.clone(),
2988 language_server.clone(),
2989 cx,
2990 )
2991 })
2992 .await?;
2993
2994 let edits = this
2995 .update(cx, |this, cx| {
2996 let edits = op.edits.into_iter().map(|edit| match edit {
2997 lsp::OneOf::Left(edit) => edit,
2998 lsp::OneOf::Right(edit) => edit.text_edit,
2999 });
3000 this.edits_from_lsp(
3001 &buffer_to_edit,
3002 edits,
3003 op.text_document.version,
3004 cx,
3005 )
3006 })
3007 .await?;
3008
3009 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3010 buffer.finalize_last_transaction();
3011 buffer.start_transaction();
3012 for (range, text) in edits {
3013 buffer.edit([(range, text)], cx);
3014 }
3015 let transaction = if buffer.end_transaction(cx).is_some() {
3016 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3017 if !push_to_history {
3018 buffer.forget_transaction(transaction.id);
3019 }
3020 Some(transaction)
3021 } else {
3022 None
3023 };
3024
3025 transaction
3026 });
3027 if let Some(transaction) = transaction {
3028 project_transaction.0.insert(buffer_to_edit, transaction);
3029 }
3030 }
3031 }
3032 }
3033
3034 Ok(project_transaction)
3035 }
3036
3037 pub fn prepare_rename<T: ToPointUtf16>(
3038 &self,
3039 buffer: ModelHandle<Buffer>,
3040 position: T,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Task<Result<Option<Range<Anchor>>>> {
3043 let position = position.to_point_utf16(buffer.read(cx));
3044 self.request_lsp(buffer, PrepareRename { position }, cx)
3045 }
3046
3047 pub fn perform_rename<T: ToPointUtf16>(
3048 &self,
3049 buffer: ModelHandle<Buffer>,
3050 position: T,
3051 new_name: String,
3052 push_to_history: bool,
3053 cx: &mut ModelContext<Self>,
3054 ) -> Task<Result<ProjectTransaction>> {
3055 let position = position.to_point_utf16(buffer.read(cx));
3056 self.request_lsp(
3057 buffer,
3058 PerformRename {
3059 position,
3060 new_name,
3061 push_to_history,
3062 },
3063 cx,
3064 )
3065 }
3066
    /// Searches the project for `query`, returning the matching ranges
    /// grouped per buffer.
    ///
    /// Locally this runs a three-stage pipeline: (1) background workers scan
    /// a partition of the visible files and forward paths whose contents may
    /// match; (2) a foreground task opens a buffer for each candidate path
    /// (already-open buffers are searched unconditionally); (3) background
    /// workers run the query against each buffer snapshot and collect anchor
    /// ranges. On a remote project, the query is forwarded to the host and
    /// the returned locations are deserialized.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan files on background workers, each worker taking a
            // contiguous slice of the global (worktree-concatenated) file list.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            // Intersect this worker's index
                                            // range with the snapshot's range.
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver gone: consumer
                                                    // has stopped caring.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per
                                                    // worker to avoid per-file
                                                    // allocations.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: open a buffer for each candidate path on the
            // foreground, feeding snapshots to the search workers. Buffers
            // that are already open are always searched.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                // The set also deduplicates: a path whose buffer is already
                // open must not be searched twice.
                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search the buffer snapshots on background workers,
            // each collecting into its own map (merged at the end) so no
            // locking is needed.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3269
    /// Dispatches an LSP-backed request for `buffer_handle`.
    ///
    /// For a local project, the request is converted via `to_lsp` and sent to
    /// the buffer's language server; for a remote project, it is serialized
    /// via `to_proto` and sent over RPC. If neither applies (or the buffer has
    /// no local file / language server), the task resolves immediately with
    /// `R::Response::default()`.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Bail out with a default response when the server does
                    // not advertise the capability this request relies on.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3312
3313 pub fn find_or_create_local_worktree(
3314 &mut self,
3315 abs_path: impl AsRef<Path>,
3316 visible: bool,
3317 cx: &mut ModelContext<Self>,
3318 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3319 let abs_path = abs_path.as_ref();
3320 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3321 Task::ready(Ok((tree.clone(), relative_path.into())))
3322 } else {
3323 let worktree = self.create_local_worktree(abs_path, visible, cx);
3324 cx.foreground()
3325 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3326 }
3327 }
3328
3329 pub fn find_local_worktree(
3330 &self,
3331 abs_path: &Path,
3332 cx: &AppContext,
3333 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3334 for tree in self.worktrees(cx) {
3335 if let Some(relative_path) = tree
3336 .read(cx)
3337 .as_local()
3338 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3339 {
3340 return Some((tree.clone(), relative_path.into()));
3341 }
3342 }
3343 None
3344 }
3345
3346 pub fn is_shared(&self) -> bool {
3347 match &self.client_state {
3348 ProjectClientState::Local { is_shared, .. } => *is_shared,
3349 ProjectClientState::Remote { .. } => false,
3350 }
3351 }
3352
    /// Starts loading a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path share one loading task through
    /// `loading_local_worktrees` (a shared future), so the worktree is only
    /// built once. Once loaded, the worktree is added to the project and, if
    /// the project has a remote id, shared or registered with the server.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Drop the pending-load entry before inspecting the
                        // result, so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // Shared futures are polled by multiple waiters, so the
                    // error must be cloneable; wrap it in an `Arc`.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Convert the shared `Arc`-wrapped error back to a plain error.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3419
3420 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3421 self.worktrees.retain(|worktree| {
3422 worktree
3423 .upgrade(cx)
3424 .map_or(false, |w| w.read(cx).id() != id)
3425 });
3426 cx.notify();
3427 }
3428
    /// Registers a worktree with this project and stores a handle to it.
    ///
    /// Local worktrees are additionally subscribed to so open buffers can be
    /// reconciled with filesystem changes. The handle is strong when the
    /// project is shared or the worktree is visible or remote; otherwise it is
    /// weak and pruned once the worktree is released.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            // A weak handle won't keep the worktree alive, so drop its slot
            // (and notify) when the worktree model is released.
            cx.observe_release(&worktree, |this, _, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        cx.notify();
    }
3457
    /// Reconciles open buffers with the latest snapshot of a local worktree.
    ///
    /// For each open buffer belonging to `worktree_handle`, rebuilds its
    /// `File` from the new snapshot: by entry id first, then by path, and
    /// finally (when neither resolves) keeps the old path with no entry id.
    /// Buffers whose absolute path changed are re-registered with their
    /// language server, and buffers that have been dropped are pruned from
    /// `opened_buffers`.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only reconcile buffers that live in this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve by entry id first (stable across renames),
                        // then fall back to resolving by path.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // No matching entry: keep the old path/mtime but
                            // clear the entry id.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of the file in sync.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer was dropped; remove its slot after the loop.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A renamed buffer may need a different language / language server.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3537
3538 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3539 let new_active_entry = entry.and_then(|project_path| {
3540 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3541 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3542 Some(entry.id)
3543 });
3544 if new_active_entry != self.active_entry {
3545 self.active_entry = new_active_entry;
3546 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3547 }
3548 }
3549
3550 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3551 self.language_server_statuses
3552 .values()
3553 .any(|status| status.pending_diagnostic_updates > 0)
3554 }
3555
3556 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3557 let mut summary = DiagnosticSummary::default();
3558 for (_, path_summary) in self.diagnostic_summaries(cx) {
3559 summary.error_count += path_summary.error_count;
3560 summary.warning_count += path_summary.warning_count;
3561 }
3562 summary
3563 }
3564
    /// Yields a `(ProjectPath, DiagnosticSummary)` pair for every path with
    /// diagnostics, across all of the project's worktrees.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
3577
3578 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3579 if self
3580 .language_server_statuses
3581 .values()
3582 .map(|status| status.pending_diagnostic_updates)
3583 .sum::<isize>()
3584 == 1
3585 {
3586 cx.emit(Event::DiskBasedDiagnosticsStarted);
3587 }
3588 }
3589
3590 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3591 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3592 if self
3593 .language_server_statuses
3594 .values()
3595 .map(|status| status.pending_diagnostic_updates)
3596 .sum::<isize>()
3597 == 0
3598 {
3599 cx.emit(Event::DiskBasedDiagnosticsFinished);
3600 }
3601 }
3602
    /// The project entry most recently activated via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3606
3607 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3608 self.worktree_for_id(path.worktree_id, cx)?
3609 .read(cx)
3610 .entry_for_path(&path.path)
3611 .map(|entry| entry.id)
3612 }
3613
3614 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3615 let worktree = self.worktree_for_entry(entry_id, cx)?;
3616 let worktree = worktree.read(cx);
3617 let worktree_id = worktree.id();
3618 let path = worktree.entry_for_id(entry_id)?.path.clone();
3619 Some(ProjectPath { worktree_id, path })
3620 }
3621
3622 // RPC message handlers
3623
    /// Handles a notification that the project is no longer shared, tearing
    /// down this replica's shared state via `project_unshared`.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3633
3634 async fn handle_add_collaborator(
3635 this: ModelHandle<Self>,
3636 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3637 _: Arc<Client>,
3638 mut cx: AsyncAppContext,
3639 ) -> Result<()> {
3640 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3641 let collaborator = envelope
3642 .payload
3643 .collaborator
3644 .take()
3645 .ok_or_else(|| anyhow!("empty collaborator"))?;
3646
3647 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3648 this.update(&mut cx, |this, cx| {
3649 this.collaborators
3650 .insert(collaborator.peer_id, collaborator);
3651 cx.notify();
3652 });
3653
3654 Ok(())
3655 }
3656
3657 async fn handle_remove_collaborator(
3658 this: ModelHandle<Self>,
3659 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3660 _: Arc<Client>,
3661 mut cx: AsyncAppContext,
3662 ) -> Result<()> {
3663 this.update(&mut cx, |this, cx| {
3664 let peer_id = PeerId(envelope.payload.peer_id);
3665 let replica_id = this
3666 .collaborators
3667 .remove(&peer_id)
3668 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3669 .replica_id;
3670 for (_, buffer) in &this.opened_buffers {
3671 if let Some(buffer) = buffer.upgrade(cx) {
3672 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3673 }
3674 }
3675 cx.emit(Event::CollaboratorLeft(peer_id));
3676 cx.notify();
3677 Ok(())
3678 })
3679 }
3680
    /// Handles registration of a new remote worktree: constructs a worktree
    /// stub with no entries or diagnostic summaries and hands it to
    /// `Worktree::remote`, whose load task is detached to run to completion.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // The worktree starts empty; entries and summaries are filled in
            // by subsequent update messages.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
3704
3705 async fn handle_unregister_worktree(
3706 this: ModelHandle<Self>,
3707 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3708 _: Arc<Client>,
3709 mut cx: AsyncAppContext,
3710 ) -> Result<()> {
3711 this.update(&mut cx, |this, cx| {
3712 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3713 this.remove_worktree(worktree_id, cx);
3714 Ok(())
3715 })
3716 }
3717
3718 async fn handle_update_worktree(
3719 this: ModelHandle<Self>,
3720 envelope: TypedEnvelope<proto::UpdateWorktree>,
3721 _: Arc<Client>,
3722 mut cx: AsyncAppContext,
3723 ) -> Result<()> {
3724 this.update(&mut cx, |this, cx| {
3725 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3726 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3727 worktree.update(cx, |worktree, _| {
3728 let worktree = worktree.as_remote_mut().unwrap();
3729 worktree.update_from_remote(envelope)
3730 })?;
3731 }
3732 Ok(())
3733 })
3734 }
3735
3736 async fn handle_update_diagnostic_summary(
3737 this: ModelHandle<Self>,
3738 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3739 _: Arc<Client>,
3740 mut cx: AsyncAppContext,
3741 ) -> Result<()> {
3742 this.update(&mut cx, |this, cx| {
3743 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3744 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3745 if let Some(summary) = envelope.payload.summary {
3746 let project_path = ProjectPath {
3747 worktree_id,
3748 path: Path::new(&summary.path).into(),
3749 };
3750 worktree.update(cx, |worktree, _| {
3751 worktree
3752 .as_remote_mut()
3753 .unwrap()
3754 .update_diagnostic_summary(project_path.path.clone(), &summary);
3755 });
3756 cx.emit(Event::DiagnosticsUpdated(project_path));
3757 }
3758 }
3759 Ok(())
3760 })
3761 }
3762
3763 async fn handle_start_language_server(
3764 this: ModelHandle<Self>,
3765 envelope: TypedEnvelope<proto::StartLanguageServer>,
3766 _: Arc<Client>,
3767 mut cx: AsyncAppContext,
3768 ) -> Result<()> {
3769 let server = envelope
3770 .payload
3771 .server
3772 .ok_or_else(|| anyhow!("invalid server"))?;
3773 this.update(&mut cx, |this, cx| {
3774 this.language_server_statuses.insert(
3775 server.id as usize,
3776 LanguageServerStatus {
3777 name: server.name,
3778 pending_work: Default::default(),
3779 pending_diagnostic_updates: 0,
3780 },
3781 );
3782 cx.notify();
3783 });
3784 Ok(())
3785 }
3786
    /// Mirrors a language-server status update from the host onto this
    /// replica: work progress notifications and disk-based-diagnostics state
    /// transitions.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamp the progress locally at receipt time.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3835
3836 async fn handle_update_buffer(
3837 this: ModelHandle<Self>,
3838 envelope: TypedEnvelope<proto::UpdateBuffer>,
3839 _: Arc<Client>,
3840 mut cx: AsyncAppContext,
3841 ) -> Result<()> {
3842 this.update(&mut cx, |this, cx| {
3843 let payload = envelope.payload.clone();
3844 let buffer_id = payload.buffer_id;
3845 let ops = payload
3846 .operations
3847 .into_iter()
3848 .map(|op| language::proto::deserialize_operation(op))
3849 .collect::<Result<Vec<_>, _>>()?;
3850 match this.opened_buffers.entry(buffer_id) {
3851 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3852 OpenBuffer::Strong(buffer) => {
3853 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3854 }
3855 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3856 OpenBuffer::Weak(_) => {}
3857 },
3858 hash_map::Entry::Vacant(e) => {
3859 e.insert(OpenBuffer::Loading(ops));
3860 }
3861 }
3862 Ok(())
3863 })
3864 }
3865
3866 async fn handle_update_buffer_file(
3867 this: ModelHandle<Self>,
3868 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3869 _: Arc<Client>,
3870 mut cx: AsyncAppContext,
3871 ) -> Result<()> {
3872 this.update(&mut cx, |this, cx| {
3873 let payload = envelope.payload.clone();
3874 let buffer_id = payload.buffer_id;
3875 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3876 let worktree = this
3877 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3878 .ok_or_else(|| anyhow!("no such worktree"))?;
3879 let file = File::from_proto(file, worktree.clone(), cx)?;
3880 let buffer = this
3881 .opened_buffers
3882 .get_mut(&buffer_id)
3883 .and_then(|b| b.upgrade(cx))
3884 .ok_or_else(|| anyhow!("no such buffer"))?;
3885 buffer.update(cx, |buffer, cx| {
3886 buffer.file_updated(Box::new(file), cx).detach();
3887 });
3888 Ok(())
3889 })
3890 }
3891
    /// Handles a peer's request to save a buffer.
    ///
    /// Waits until this replica has caught up to the buffer version the peer
    /// observed before saving, then reports the saved version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all edits up to the requested version have been
        // applied on this replica.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
3924
3925 async fn handle_reload_buffers(
3926 this: ModelHandle<Self>,
3927 envelope: TypedEnvelope<proto::ReloadBuffers>,
3928 _: Arc<Client>,
3929 mut cx: AsyncAppContext,
3930 ) -> Result<proto::ReloadBuffersResponse> {
3931 let sender_id = envelope.original_sender_id()?;
3932 let reload = this.update(&mut cx, |this, cx| {
3933 let mut buffers = HashSet::default();
3934 for buffer_id in &envelope.payload.buffer_ids {
3935 buffers.insert(
3936 this.opened_buffers
3937 .get(buffer_id)
3938 .and_then(|buffer| buffer.upgrade(cx))
3939 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3940 );
3941 }
3942 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3943 })?;
3944
3945 let project_transaction = reload.await?;
3946 let project_transaction = this.update(&mut cx, |this, cx| {
3947 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3948 });
3949 Ok(proto::ReloadBuffersResponse {
3950 transaction: Some(project_transaction),
3951 })
3952 }
3953
3954 async fn handle_format_buffers(
3955 this: ModelHandle<Self>,
3956 envelope: TypedEnvelope<proto::FormatBuffers>,
3957 _: Arc<Client>,
3958 mut cx: AsyncAppContext,
3959 ) -> Result<proto::FormatBuffersResponse> {
3960 let sender_id = envelope.original_sender_id()?;
3961 let format = this.update(&mut cx, |this, cx| {
3962 let mut buffers = HashSet::default();
3963 for buffer_id in &envelope.payload.buffer_ids {
3964 buffers.insert(
3965 this.opened_buffers
3966 .get(buffer_id)
3967 .and_then(|buffer| buffer.upgrade(cx))
3968 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3969 );
3970 }
3971 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3972 })?;
3973
3974 let project_transaction = format.await?;
3975 let project_transaction = this.update(&mut cx, |this, cx| {
3976 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3977 });
3978 Ok(proto::FormatBuffersResponse {
3979 transaction: Some(project_transaction),
3980 })
3981 }
3982
    /// Handles a peer's completion request.
    ///
    /// Waits for the buffer to catch up to the version the peer observed,
    /// computes completions at the given anchor, and returns them together
    /// with the buffer version they were computed at.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting; it is echoed back so the peer
        // can interpret the anchors in the response.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4017
    /// Handles a peer's request to apply a completion's additional text edits.
    ///
    /// The completion is deserialized against the buffer's current language,
    /// the edits are applied (with `false` passed through — presumably
    /// "don't push to this replica's history"; see the matching local caller),
    /// and the resulting transaction, if any, is serialized back.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4050
    /// Handles a peer's code-action request for an anchor range.
    ///
    /// Waits for the buffer to reach the peer's observed version, computes the
    /// available code actions, and returns them along with the buffer version
    /// they were computed at.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Echo back the version at which the actions were computed so the
        // peer can interpret the response's anchors.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4093
    /// Handles a peer's request to apply a code action to a buffer, returning
    /// the resulting project transaction serialized for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4124
    /// Generic handler for proto requests that correspond to an `LspCommand`.
    ///
    /// Deserializes the typed request, re-dispatches it through `request_lsp`
    /// (which routes it to a local language server or over RPC), and serializes
    /// the response for the original sender, tagged with the buffer version at
    /// which the request was issued.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before dispatching so the response can be
        // interpreted relative to it by the peer.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4165
4166 async fn handle_get_project_symbols(
4167 this: ModelHandle<Self>,
4168 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4169 _: Arc<Client>,
4170 mut cx: AsyncAppContext,
4171 ) -> Result<proto::GetProjectSymbolsResponse> {
4172 let symbols = this
4173 .update(&mut cx, |this, cx| {
4174 this.symbols(&envelope.payload.query, cx)
4175 })
4176 .await?;
4177
4178 Ok(proto::GetProjectSymbolsResponse {
4179 symbols: symbols.iter().map(serialize_symbol).collect(),
4180 })
4181 }
4182
4183 async fn handle_search_project(
4184 this: ModelHandle<Self>,
4185 envelope: TypedEnvelope<proto::SearchProject>,
4186 _: Arc<Client>,
4187 mut cx: AsyncAppContext,
4188 ) -> Result<proto::SearchProjectResponse> {
4189 let peer_id = envelope.original_sender_id()?;
4190 let query = SearchQuery::from_proto(envelope.payload)?;
4191 let result = this
4192 .update(&mut cx, |this, cx| this.search(query, cx))
4193 .await?;
4194
4195 this.update(&mut cx, |this, cx| {
4196 let mut locations = Vec::new();
4197 for (buffer, ranges) in result {
4198 for range in ranges {
4199 let start = serialize_anchor(&range.start);
4200 let end = serialize_anchor(&range.end);
4201 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4202 locations.push(proto::Location {
4203 buffer: Some(buffer),
4204 start: Some(start),
4205 end: Some(end),
4206 });
4207 }
4208 }
4209 Ok(proto::SearchProjectResponse { locations })
4210 })
4211 }
4212
    /// Handles a peer's request to open the buffer containing a symbol.
    ///
    /// The symbol's signature is recomputed from this project's nonce and must
    /// match the signature carried in the request (see `symbol_signature`),
    /// so a peer cannot open arbitrary paths by forging a symbol.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4243
    /// Computes a SHA-256 signature binding a symbol's worktree id and path to
    /// this project instance via `self.nonce`, so symbols handed to peers can
    /// later be validated (see `handle_open_buffer_for_symbol`).
    fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(worktree_id.to_proto().to_be_bytes());
        hasher.update(path.to_string_lossy().as_bytes());
        hasher.update(self.nonce.to_be_bytes());
        // SHA-256 output is exactly 32 bytes, so this conversion cannot fail.
        hasher.finalize().as_slice().try_into().unwrap()
    }
4251
4252 async fn handle_open_buffer_by_id(
4253 this: ModelHandle<Self>,
4254 envelope: TypedEnvelope<proto::OpenBufferById>,
4255 _: Arc<Client>,
4256 mut cx: AsyncAppContext,
4257 ) -> Result<proto::OpenBufferResponse> {
4258 let peer_id = envelope.original_sender_id()?;
4259 let buffer = this
4260 .update(&mut cx, |this, cx| {
4261 this.open_buffer_by_id(envelope.payload.id, cx)
4262 })
4263 .await?;
4264 this.update(&mut cx, |this, cx| {
4265 Ok(proto::OpenBufferResponse {
4266 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4267 })
4268 })
4269 }
4270
4271 async fn handle_open_buffer_by_path(
4272 this: ModelHandle<Self>,
4273 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4274 _: Arc<Client>,
4275 mut cx: AsyncAppContext,
4276 ) -> Result<proto::OpenBufferResponse> {
4277 let peer_id = envelope.original_sender_id()?;
4278 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4279 let open_buffer = this.update(&mut cx, |this, cx| {
4280 this.open_buffer(
4281 ProjectPath {
4282 worktree_id,
4283 path: PathBuf::from(envelope.payload.path).into(),
4284 },
4285 cx,
4286 )
4287 });
4288
4289 let buffer = open_buffer.await?;
4290 this.update(&mut cx, |this, cx| {
4291 Ok(proto::OpenBufferResponse {
4292 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4293 })
4294 })
4295 }
4296
4297 fn serialize_project_transaction_for_peer(
4298 &mut self,
4299 project_transaction: ProjectTransaction,
4300 peer_id: PeerId,
4301 cx: &AppContext,
4302 ) -> proto::ProjectTransaction {
4303 let mut serialized_transaction = proto::ProjectTransaction {
4304 buffers: Default::default(),
4305 transactions: Default::default(),
4306 };
4307 for (buffer, transaction) in project_transaction.0 {
4308 serialized_transaction
4309 .buffers
4310 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4311 serialized_transaction
4312 .transactions
4313 .push(language::proto::serialize_transaction(&transaction));
4314 }
4315 serialized_transaction
4316 }
4317
    /// Reconstructs a `ProjectTransaction` from its wire representation.
    ///
    /// Each buffer is resolved via `deserialize_buffer` (which may wait for
    /// the buffer to be opened locally), then every transaction's edits are
    /// awaited so the operations have actually been applied before the
    /// transaction is returned. When `push_to_history` is true, each
    /// transaction is also pushed onto its buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Wait until the edits composing this transaction have arrived
                // and been applied before reporting completion.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4351
4352 fn serialize_buffer_for_peer(
4353 &mut self,
4354 buffer: &ModelHandle<Buffer>,
4355 peer_id: PeerId,
4356 cx: &AppContext,
4357 ) -> proto::Buffer {
4358 let buffer_id = buffer.read(cx).remote_id();
4359 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4360 if shared_buffers.insert(buffer_id) {
4361 proto::Buffer {
4362 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4363 }
4364 } else {
4365 proto::Buffer {
4366 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4367 }
4368 }
4369 }
4370
    /// Materializes a buffer from its wire representation.
    ///
    /// A `Variant::Id` refers to a buffer that is (or will be) registered
    /// locally: this waits — via the `opened_buffer` watch channel — until a
    /// buffer with that id appears. A `Variant::State` carries the full buffer
    /// contents, from which a new local replica is created and registered.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        // Not open yet: wait for the next "buffer opened"
                        // notification and re-check.
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // A buffer may have no file (e.g. untitled); only resolve
                    // the worktree when file metadata was sent.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks blocked in the `Variant::Id` loop above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4429
    /// Reconstructs a `Symbol` from its wire representation, recovering a
    /// display label from the symbol's language when one is available.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // SAFETY NOTE(review): this transmutes a raw wire integer into the
        // symbol-kind type without validation (mirroring `serialize_symbol`).
        // A buggy or malicious peer could send an out-of-range value, which
        // would be undefined behavior — a checked conversion would be safer.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Prefer a language-specific label; fall back to the plain name.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4459
4460 async fn handle_buffer_saved(
4461 this: ModelHandle<Self>,
4462 envelope: TypedEnvelope<proto::BufferSaved>,
4463 _: Arc<Client>,
4464 mut cx: AsyncAppContext,
4465 ) -> Result<()> {
4466 let version = deserialize_version(envelope.payload.version);
4467 let mtime = envelope
4468 .payload
4469 .mtime
4470 .ok_or_else(|| anyhow!("missing mtime"))?
4471 .into();
4472
4473 this.update(&mut cx, |this, cx| {
4474 let buffer = this
4475 .opened_buffers
4476 .get(&envelope.payload.buffer_id)
4477 .and_then(|buffer| buffer.upgrade(cx));
4478 if let Some(buffer) = buffer {
4479 buffer.update(cx, |buffer, cx| {
4480 buffer.did_save(version, mtime, None, cx);
4481 });
4482 }
4483 Ok(())
4484 })
4485 }
4486
4487 async fn handle_buffer_reloaded(
4488 this: ModelHandle<Self>,
4489 envelope: TypedEnvelope<proto::BufferReloaded>,
4490 _: Arc<Client>,
4491 mut cx: AsyncAppContext,
4492 ) -> Result<()> {
4493 let payload = envelope.payload.clone();
4494 let version = deserialize_version(payload.version);
4495 let mtime = payload
4496 .mtime
4497 .ok_or_else(|| anyhow!("missing mtime"))?
4498 .into();
4499 this.update(&mut cx, |this, cx| {
4500 let buffer = this
4501 .opened_buffers
4502 .get(&payload.buffer_id)
4503 .and_then(|buffer| buffer.upgrade(cx));
4504 if let Some(buffer) = buffer {
4505 buffer.update(cx, |buffer, cx| {
4506 buffer.did_reload(version, mtime, cx);
4507 });
4508 }
4509 Ok(())
4510 })
4511 }
4512
4513 pub fn match_paths<'a>(
4514 &self,
4515 query: &'a str,
4516 include_ignored: bool,
4517 smart_case: bool,
4518 max_results: usize,
4519 cancel_flag: &'a AtomicBool,
4520 cx: &AppContext,
4521 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4522 let worktrees = self
4523 .worktrees(cx)
4524 .filter(|worktree| worktree.read(cx).is_visible())
4525 .collect::<Vec<_>>();
4526 let include_root_name = worktrees.len() > 1;
4527 let candidate_sets = worktrees
4528 .into_iter()
4529 .map(|worktree| CandidateSet {
4530 snapshot: worktree.read(cx).snapshot(),
4531 include_ignored,
4532 include_root_name,
4533 })
4534 .collect::<Vec<_>>();
4535
4536 let background = cx.background().clone();
4537 async move {
4538 fuzzy::match_paths(
4539 candidate_sets.as_slice(),
4540 query,
4541 smart_case,
4542 max_results,
4543 cancel_flag,
4544 background,
4545 )
4546 .await
4547 }
4548 }
4549
    /// Converts a batch of LSP `TextEdit`s into anchor-based buffer edits,
    /// resolved against the buffer snapshot corresponding to `version` (or the
    /// current snapshot when `version` is `None`).
    ///
    /// Runs on the background executor. Returns an error when the server sent
    /// edits whose coordinates don't exist in that snapshot.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Only merge across a gap when it is exactly one
                        // newline: the next edit starts at column 0 of the
                        // line directly below, and the current edit already
                        // ends at the end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject coordinates that fall outside the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit instead of
                                    // emitting an adjacent one.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4652
4653 fn buffer_snapshot_for_lsp_version(
4654 &mut self,
4655 buffer: &ModelHandle<Buffer>,
4656 version: Option<i32>,
4657 cx: &AppContext,
4658 ) -> Result<TextBufferSnapshot> {
4659 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4660
4661 if let Some(version) = version {
4662 let buffer_id = buffer.read(cx).remote_id();
4663 let snapshots = self
4664 .buffer_snapshots
4665 .get_mut(&buffer_id)
4666 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4667 let mut found_snapshot = None;
4668 snapshots.retain(|(snapshot_version, snapshot)| {
4669 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4670 false
4671 } else {
4672 if *snapshot_version == version {
4673 found_snapshot = Some(snapshot.clone());
4674 }
4675 true
4676 }
4677 });
4678
4679 found_snapshot.ok_or_else(|| {
4680 anyhow!(
4681 "snapshot not found for buffer {} at version {}",
4682 buffer_id,
4683 version
4684 )
4685 })
4686 } else {
4687 Ok((buffer.read(cx)).text_snapshot())
4688 }
4689 }
4690
4691 fn language_server_for_buffer(
4692 &self,
4693 buffer: &Buffer,
4694 cx: &AppContext,
4695 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4696 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4697 let worktree_id = file.worktree_id(cx);
4698 self.language_servers
4699 .get(&(worktree_id, language.lsp_adapter()?.name()))
4700 } else {
4701 None
4702 }
4703 }
4704}
4705
4706impl WorktreeHandle {
4707 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4708 match self {
4709 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4710 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4711 }
4712 }
4713}
4714
4715impl OpenBuffer {
4716 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4717 match self {
4718 OpenBuffer::Strong(handle) => Some(handle.clone()),
4719 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4720 OpenBuffer::Loading(_) => None,
4721 }
4722 }
4723}
4724
/// A single worktree's contribution to a fuzzy path search: a point-in-time
/// snapshot plus the options controlling which entries are eligible.
struct CandidateSet {
    snapshot: Snapshot,
    /// Whether ignored files participate in the search.
    include_ignored: bool,
    /// Whether match prefixes include the worktree root name (used when the
    /// project has more than one worktree).
    include_root_name: bool,
}
4730
4731impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4732 type Candidates = CandidateSetIter<'a>;
4733
4734 fn id(&self) -> usize {
4735 self.snapshot.id().to_usize()
4736 }
4737
4738 fn len(&self) -> usize {
4739 if self.include_ignored {
4740 self.snapshot.file_count()
4741 } else {
4742 self.snapshot.visible_file_count()
4743 }
4744 }
4745
4746 fn prefix(&self) -> Arc<str> {
4747 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4748 self.snapshot.root_name().into()
4749 } else if self.include_root_name {
4750 format!("{}/", self.snapshot.root_name()).into()
4751 } else {
4752 "".into()
4753 }
4754 }
4755
4756 fn candidates(&'a self, start: usize) -> Self::Candidates {
4757 CandidateSetIter {
4758 traversal: self.snapshot.files(self.include_ignored, start),
4759 }
4760 }
4761}
4762
/// Iterator adapter turning a worktree file traversal into fuzzy-match
/// candidates for `CandidateSet::candidates`.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4766
4767impl<'a> Iterator for CandidateSetIter<'a> {
4768 type Item = PathMatchCandidate<'a>;
4769
4770 fn next(&mut self) -> Option<Self::Item> {
4771 self.traversal.next().map(|entry| {
4772 if let EntryKind::File(char_bag) = entry.kind {
4773 PathMatchCandidate {
4774 path: &entry.path,
4775 char_bag,
4776 }
4777 } else {
4778 unreachable!()
4779 }
4780 })
4781 }
4782}
4783
4784impl Entity for Project {
4785 type Event = Event;
4786
4787 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4788 match &self.client_state {
4789 ProjectClientState::Local { remote_id_rx, .. } => {
4790 if let Some(project_id) = *remote_id_rx.borrow() {
4791 self.client
4792 .send(proto::UnregisterProject { project_id })
4793 .log_err();
4794 }
4795 }
4796 ProjectClientState::Remote { remote_id, .. } => {
4797 self.client
4798 .send(proto::LeaveProject {
4799 project_id: *remote_id,
4800 })
4801 .log_err();
4802 }
4803 }
4804 }
4805
4806 fn app_will_quit(
4807 &mut self,
4808 _: &mut MutableAppContext,
4809 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4810 let shutdown_futures = self
4811 .language_servers
4812 .drain()
4813 .filter_map(|(_, (_, server))| server.shutdown())
4814 .collect::<Vec<_>>();
4815 Some(
4816 async move {
4817 futures::future::join_all(shutdown_futures).await;
4818 }
4819 .boxed(),
4820 )
4821 }
4822}
4823
4824impl Collaborator {
4825 fn from_proto(
4826 message: proto::Collaborator,
4827 user_store: &ModelHandle<UserStore>,
4828 cx: &mut AsyncAppContext,
4829 ) -> impl Future<Output = Result<Self>> {
4830 let user = user_store.update(cx, |user_store, cx| {
4831 user_store.fetch_user(message.user_id, cx)
4832 });
4833
4834 async move {
4835 Ok(Self {
4836 peer_id: PeerId(message.peer_id),
4837 user: user.await?,
4838 replica_id: message.replica_id as ReplicaId,
4839 })
4840 }
4841 }
4842}
4843
4844impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4845 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4846 Self {
4847 worktree_id,
4848 path: path.as_ref().into(),
4849 }
4850 }
4851}
4852
4853impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4854 fn from(options: lsp::CreateFileOptions) -> Self {
4855 Self {
4856 overwrite: options.overwrite.unwrap_or(false),
4857 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4858 }
4859 }
4860}
4861
4862impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4863 fn from(options: lsp::RenameFileOptions) -> Self {
4864 Self {
4865 overwrite: options.overwrite.unwrap_or(false),
4866 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4867 }
4868 }
4869}
4870
4871impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4872 fn from(options: lsp::DeleteFileOptions) -> Self {
4873 Self {
4874 recursive: options.recursive.unwrap_or(false),
4875 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4876 }
4877 }
4878}
4879
/// Converts a `Symbol` into its wire representation. The inverse of
/// `Project::deserialize_symbol`.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // NOTE(review): the symbol kind is transmuted to a raw integer for the
        // wire; `deserialize_symbol` performs the (unchecked) inverse.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4899
/// Expresses `path` relative to `base`, walking both component lists in
/// lockstep: the shared prefix is skipped, remaining `base` components become
/// `..` steps, and the rest of `path` is appended as-is.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut rel: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both exhausted: done.
            (None, None) => break,
            // Base exhausted: append the rest of the target path.
            (Some(component), None) => {
                rel.push(component);
                rel.extend(target.by_ref());
                break;
            }
            // Target exhausted: step up once per remaining base component.
            (None, _) => rel.push(Component::ParentDir),
            // Still inside the shared prefix: skip equal components.
            (Some(a), Some(b)) if rel.is_empty() && a == b => {}
            // `.` in the base neither ascends nor descends.
            (Some(a), Some(b)) if b == Component::CurDir => rel.push(a),
            // Paths diverge: ascend out of the rest of `base`, then append
            // the rest of `path`.
            (Some(component), Some(_)) => {
                rel.push(Component::ParentDir);
                rel.extend(anchor.by_ref().map(|_| Component::ParentDir));
                rel.push(component);
                rel.extend(target.by_ref());
                break;
            }
        }
    }
    rel.iter().map(|component| component.as_os_str()).collect()
}
4928
4929impl Item for Buffer {
4930 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4931 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4932 }
4933}
4934
4935#[cfg(test)]
4936mod tests {
4937 use crate::worktree::WorktreeHandle;
4938
4939 use super::{Event, *};
4940 use fs::RealFs;
4941 use futures::{future, StreamExt};
4942 use gpui::test::subscribe;
4943 use language::{
4944 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
4945 OffsetRangeExt, Point, ToPoint,
4946 };
4947 use lsp::Url;
4948 use serde_json::json;
4949 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
4950 use unindent::Unindent as _;
4951 use util::{assert_set_eq, test::temp_tree};
4952
    // Builds a real temp directory containing symlinks, then verifies that the
    // worktree reports the expected file count, that symlinked paths resolve
    // to the same inodes, and that fuzzy path matching finds the expected
    // entries for the query "bna".
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        // Open the project through the symlinked root.
        let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying inodes.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5006
    // End-to-end coverage of language-server lifecycle management: starting a
    // server per language, routing open/change/save/close notifications to
    // the matching server, handling renames (including ones that change the
    // file's language), and restarting servers.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Set up two languages, each backed by a fake language-server factory.
        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        // A project with one file per language plus a TOML file that has no
        // associated language server.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root"], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Seed diagnostics so we can verify they are cleared when the file's
        // language changes below.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive shutdown requests before the replacements start.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5383
5384 #[gpui::test]
5385 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5386 cx.foreground().forbid_parking();
5387
5388 let fs = FakeFs::new(cx.background());
5389 fs.insert_tree(
5390 "/dir",
5391 json!({
5392 "a.rs": "let a = 1;",
5393 "b.rs": "let b = 2;"
5394 }),
5395 )
5396 .await;
5397
5398 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5399
5400 let buffer_a = project
5401 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5402 .await
5403 .unwrap();
5404 let buffer_b = project
5405 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5406 .await
5407 .unwrap();
5408
5409 project.update(cx, |project, cx| {
5410 project
5411 .update_diagnostics(
5412 lsp::PublishDiagnosticsParams {
5413 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5414 version: None,
5415 diagnostics: vec![lsp::Diagnostic {
5416 range: lsp::Range::new(
5417 lsp::Position::new(0, 4),
5418 lsp::Position::new(0, 5),
5419 ),
5420 severity: Some(lsp::DiagnosticSeverity::ERROR),
5421 message: "error 1".to_string(),
5422 ..Default::default()
5423 }],
5424 },
5425 &[],
5426 cx,
5427 )
5428 .unwrap();
5429 project
5430 .update_diagnostics(
5431 lsp::PublishDiagnosticsParams {
5432 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5433 version: None,
5434 diagnostics: vec![lsp::Diagnostic {
5435 range: lsp::Range::new(
5436 lsp::Position::new(0, 4),
5437 lsp::Position::new(0, 5),
5438 ),
5439 severity: Some(lsp::DiagnosticSeverity::WARNING),
5440 message: "error 2".to_string(),
5441 ..Default::default()
5442 }],
5443 },
5444 &[],
5445 cx,
5446 )
5447 .unwrap();
5448 });
5449
5450 buffer_a.read_with(cx, |buffer, _| {
5451 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5452 assert_eq!(
5453 chunks
5454 .iter()
5455 .map(|(s, d)| (s.as_str(), *d))
5456 .collect::<Vec<_>>(),
5457 &[
5458 ("let ", None),
5459 ("a", Some(DiagnosticSeverity::ERROR)),
5460 (" = 1;", None),
5461 ]
5462 );
5463 });
5464 buffer_b.read_with(cx, |buffer, _| {
5465 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5466 assert_eq!(
5467 chunks
5468 .iter()
5469 .map(|(s, d)| (s.as_str(), *d))
5470 .collect::<Vec<_>>(),
5471 &[
5472 ("let ", None),
5473 ("b", Some(DiagnosticSeverity::WARNING)),
5474 (" = 2;", None),
5475 ]
5476 );
5477 });
5478 }
5479
// Exercises how a server's disk-based diagnostics progress reports become
// project events: `DiskBasedDiagnosticsStarted` on the first progress token,
// per-path `DiagnosticsUpdated` events while diagnostics arrive, and
// `DiskBasedDiagnosticsUpdated`/`Finished` once every nested start of the
// token has been balanced by an end.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id =
        project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted
    );

    // Start the token twice more (with one interleaved end), leaving two
    // outstanding starts. No additional Started event is asserted here.
    fake_server.start_progress(progress_token).await;
    fake_server.end_progress(progress_token).await;
    fake_server.start_progress(progress_token).await;

    // A diagnostic published while progress is still outstanding should emit
    // a per-path DiagnosticsUpdated event immediately.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: vec![lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                ..Default::default()
            }],
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
    );

    // Balancing both outstanding starts completes the disk-based pass.
    fake_server.end_progress(progress_token).await;
    fake_server.end_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsUpdated
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The diagnostic published above is present once the buffer is opened.
    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    cx.foreground().run_until_parked();
    // The second empty publish was a no-op: no further event is pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
5609
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not wedge the project: the old server's
// never-ended progress token is discarded, and the new server's single
// start/end cycle drives the full Started/Updated/Finished event sequence.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted
    );

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsUpdated
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished
    );
    // No stale progress from the old server is still counted as running.
    project.read_with(cx, |project, _| {
        assert!(!project.is_running_disk_based_diagnostics());
    });
}
5672
// Exercises how published LSP diagnostics (which carry positions in an older
// document version) are translated onto the buffer's current contents:
// diagnostics move with edits made after the reported version, overlapping
// diagnostics highlight correctly, and out-of-order reports keyed to a
// specific version are mapped through the intervening changes.
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // Rows 0..2 in the report land on rows 2..4 after the two inserted
        // newlines; querying rows 3..5 yields the BB and CCC entries.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that cuts into diagnostics only returns the covered parts.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the ERROR and WARNING overlap, the higher-severity ERROR wins;
        // the WARNING shows on the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // Results come back ordered by position in the buffer, with ranges
        // shifted by the indentation/signature/identifier edits above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
5955
5956 #[gpui::test]
5957 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5958 cx.foreground().forbid_parking();
5959
5960 let text = concat!(
5961 "let one = ;\n", //
5962 "let two = \n",
5963 "let three = 3;\n",
5964 );
5965
5966 let fs = FakeFs::new(cx.background());
5967 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5968
5969 let project = Project::test(fs, ["/dir"], cx).await;
5970 let buffer = project
5971 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5972 .await
5973 .unwrap();
5974
5975 project.update(cx, |project, cx| {
5976 project
5977 .update_buffer_diagnostics(
5978 &buffer,
5979 vec![
5980 DiagnosticEntry {
5981 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5982 diagnostic: Diagnostic {
5983 severity: DiagnosticSeverity::ERROR,
5984 message: "syntax error 1".to_string(),
5985 ..Default::default()
5986 },
5987 },
5988 DiagnosticEntry {
5989 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5990 diagnostic: Diagnostic {
5991 severity: DiagnosticSeverity::ERROR,
5992 message: "syntax error 2".to_string(),
5993 ..Default::default()
5994 },
5995 },
5996 ],
5997 None,
5998 cx,
5999 )
6000 .unwrap();
6001 });
6002
6003 // An empty range is extended forward to include the following character.
6004 // At the end of a line, an empty range is extended backward to include
6005 // the preceding character.
6006 buffer.read_with(cx, |buffer, _| {
6007 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6008 assert_eq!(
6009 chunks
6010 .iter()
6011 .map(|(s, d)| (s.as_str(), *d))
6012 .collect::<Vec<_>>(),
6013 &[
6014 ("let one = ", None),
6015 (";", Some(DiagnosticSeverity::ERROR)),
6016 ("\nlet two =", None),
6017 (" ", Some(DiagnosticSeverity::ERROR)),
6018 ("\nlet three = 3;\n", None)
6019 ]
6020 );
6021 });
6022 }
6023
// Verifies that `Project::edits_from_lsp` interprets LSP edits against the
// document version the server last saw, translating their ranges through
// any buffer edits made after that version so they apply cleanly to the
// current contents.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server associates with the freshly opened doc;
    // the LSP edits below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are positions in the ORIGINAL (pre-edit) document,
    // since they are tagged with `lsp_document_version`.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's interleaved comments
    // while performing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
6189
// Verifies that `Project::edits_from_lsp` collapses a large rewrite-the-file
// style diff (as rust-analyzer emits for merge-imports) into the minimal set
// of real changes, so the buffer isn't needlessly replaced wholesale.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above net out to just two real changes:
        // the import rewrite and the removal of the second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
6308
6309 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6310 buffer: &Buffer,
6311 range: Range<T>,
6312 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6313 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6314 for chunk in buffer.snapshot().chunks(range, true) {
6315 if chunks.last().map_or(false, |prev_chunk| {
6316 prev_chunk.1 == chunk.diagnostic_severity
6317 }) {
6318 chunks.last_mut().unwrap().0.push_str(chunk.text);
6319 } else {
6320 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6321 }
6322 }
6323 chunks
6324 }
6325
6326 #[gpui::test]
6327 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6328 let dir = temp_tree(json!({
6329 "root": {
6330 "dir1": {},
6331 "dir2": {
6332 "dir3": {}
6333 }
6334 }
6335 }));
6336
6337 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6338 let cancel_flag = Default::default();
6339 let results = project
6340 .read_with(cx, |project, cx| {
6341 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6342 })
6343 .await;
6344
6345 assert!(results.is_empty());
6346 }
6347
// Verifies go-to-definition across worktree boundaries: a definition located
// in a file outside the project's visible worktree is opened via a hidden
// single-file worktree, which is released again once the last handle to the
// target location is dropped.
#[gpui::test]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports a definition inside the out-of-project a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // A second, invisible worktree was created for the definition target.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the hidden worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
6442
// Verifies completion handling when the server supplies `insert_text` but no
// explicit text edit range: the project falls back to replacing the word
// fragment preceding the cursor (here, the trailing "fqn").
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the buffer, after the "fqn" fragment.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The fake server answers with an item that has insert_text but no
    // textEdit, so no replacement range is provided.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred old_range covers the 3-character "fqn" word fragment.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
6500
    // A code action whose resolution yields no edits must be executed as a
    // command; the edits then arrive via the server's `workspace/applyEdit`
    // request and are captured into the returned project transaction.
    #[gpui::test(iterations = 10)]
    async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
        let mut language = Language::new(
            LanguageConfig {
                name: "TypeScript".into(),
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.ts": "a",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
            .await
            .unwrap();

        let fake_server = fake_language_servers.next().await.unwrap();

        // Language server returns code actions that contain commands, and not edits.
        let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
        fake_server
            .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                Ok(Some(vec![
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "The code action".into(),
                        command: Some(lsp::Command {
                            title: "The command".into(),
                            command: "_the/command".into(),
                            arguments: Some(vec![json!("the-argument")]),
                        }),
                        ..Default::default()
                    }),
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "two".into(),
                        ..Default::default()
                    }),
                ]))
            })
            .next()
            .await;

        // Apply the first action (the one carrying the command).
        let action = actions.await.unwrap()[0].clone();
        let apply = project.update(cx, |project, cx| {
            project.apply_code_action(buffer.clone(), action, true, cx)
        });

        // Resolving the code action does not populate its edits. In absence of
        // edits, we must execute the given command.
        fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
            |action, _| async move { Ok(action) },
        );

        // While executing the command, the language server sends the editor
        // a `workspaceEdit` request.
        fake_server
            .handle_request::<lsp::request::ExecuteCommand, _, _>({
                let fake = fake_server.clone();
                move |params, _| {
                    assert_eq!(params.command, "_the/command");
                    let fake = fake.clone();
                    async move {
                        // Server-initiated request: insert "X" at the start of a.ts.
                        fake.server
                            .request::<lsp::request::ApplyWorkspaceEdit>(
                                lsp::ApplyWorkspaceEditParams {
                                    label: None,
                                    edit: lsp::WorkspaceEdit {
                                        changes: Some(
                                            [(
                                                lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                                vec![lsp::TextEdit {
                                                    range: lsp::Range::new(
                                                        lsp::Position::new(0, 0),
                                                        lsp::Position::new(0, 0),
                                                    ),
                                                    new_text: "X".into(),
                                                }],
                                            )]
                                            .into_iter()
                                            .collect(),
                                        ),
                                        ..Default::default()
                                    },
                                },
                            )
                            .await
                            .unwrap();
                        Ok(Some(json!(null)))
                    }
                }
            })
            .next()
            .await;

        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspaceEdit` request.
        let transaction = apply.await.unwrap();
        assert!(transaction.0.contains_key(&buffer));
        buffer.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "Xa");
            buffer.undo(cx);
            assert_eq!(buffer.text(), "a");
        });
    }
6616
    // Saving a modified buffer writes its contents back to disk; the on-disk
    // text afterwards matches the buffer exactly.
    #[gpui::test]
    async fn test_save_file(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        buffer
            .update(cx, |buffer, cx| {
                assert_eq!(buffer.text(), "the old contents");
                // Insert a large amount of text before saving.
                buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
    }
6645
    // Same as `test_save_file`, but the worktree root is the file itself
    // rather than its parent directory.
    #[gpui::test]
    async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        // Note: the project is rooted at the single file, not at "/dir".
        let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        buffer
            .update(cx, |buffer, cx| {
                buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
    }
6673
    // "Save as" on an untitled buffer: the buffer gains a file, is no longer
    // dirty, and subsequently opening that path yields the same buffer.
    #[gpui::test]
    async fn test_save_as(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({})).await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        // Create an in-memory buffer with no backing file.
        let buffer = project.update(cx, |project, cx| {
            project.create_buffer("", None, cx).unwrap()
        });
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "abc")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        project
            .update(cx, |project, cx| {
                project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
            })
            .await
            .unwrap();
        assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
        // After save-as, the buffer is associated with the new path and clean.
        buffer.read_with(cx, |buffer, cx| {
            assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Opening the just-saved path must return the existing buffer, not a copy.
        let opened_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/file1", cx)
            })
            .await
            .unwrap();
        assert_eq!(opened_buffer, buffer);
    }
6709
6710 #[gpui::test(retries = 5)]
6711 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6712 let dir = temp_tree(json!({
6713 "a": {
6714 "file1": "",
6715 "file2": "",
6716 "file3": "",
6717 },
6718 "b": {
6719 "c": {
6720 "file4": "",
6721 "file5": "",
6722 }
6723 }
6724 }));
6725
6726 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6727 let rpc = project.read_with(cx, |p, _| p.client.clone());
6728
6729 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6730 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6731 async move { buffer.await.unwrap() }
6732 };
6733 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6734 project.read_with(cx, |project, cx| {
6735 let tree = project.worktrees(cx).next().unwrap();
6736 tree.read(cx)
6737 .entry_for_path(path)
6738 .expect(&format!("no entry for path {}", path))
6739 .id
6740 })
6741 };
6742
6743 let buffer2 = buffer_for_path("a/file2", cx).await;
6744 let buffer3 = buffer_for_path("a/file3", cx).await;
6745 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6746 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6747
6748 let file2_id = id_for_path("a/file2", &cx);
6749 let file3_id = id_for_path("a/file3", &cx);
6750 let file4_id = id_for_path("b/c/file4", &cx);
6751
6752 // Create a remote copy of this worktree.
6753 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6754 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6755 let (remote, load_task) = cx.update(|cx| {
6756 Worktree::remote(
6757 1,
6758 1,
6759 initial_snapshot.to_proto(&Default::default(), true),
6760 rpc.clone(),
6761 cx,
6762 )
6763 });
6764 // tree
6765 load_task.await;
6766
6767 cx.read(|cx| {
6768 assert!(!buffer2.read(cx).is_dirty());
6769 assert!(!buffer3.read(cx).is_dirty());
6770 assert!(!buffer4.read(cx).is_dirty());
6771 assert!(!buffer5.read(cx).is_dirty());
6772 });
6773
6774 // Rename and delete files and directories.
6775 tree.flush_fs_events(&cx).await;
6776 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6777 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6778 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6779 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6780 tree.flush_fs_events(&cx).await;
6781
6782 let expected_paths = vec![
6783 "a",
6784 "a/file1",
6785 "a/file2.new",
6786 "b",
6787 "d",
6788 "d/file3",
6789 "d/file4",
6790 ];
6791
6792 cx.read(|app| {
6793 assert_eq!(
6794 tree.read(app)
6795 .paths()
6796 .map(|p| p.to_str().unwrap())
6797 .collect::<Vec<_>>(),
6798 expected_paths
6799 );
6800
6801 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6802 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6803 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6804
6805 assert_eq!(
6806 buffer2.read(app).file().unwrap().path().as_ref(),
6807 Path::new("a/file2.new")
6808 );
6809 assert_eq!(
6810 buffer3.read(app).file().unwrap().path().as_ref(),
6811 Path::new("d/file3")
6812 );
6813 assert_eq!(
6814 buffer4.read(app).file().unwrap().path().as_ref(),
6815 Path::new("d/file4")
6816 );
6817 assert_eq!(
6818 buffer5.read(app).file().unwrap().path().as_ref(),
6819 Path::new("b/c/file5")
6820 );
6821
6822 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6823 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6824 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6825 assert!(buffer5.read(app).file().unwrap().is_deleted());
6826 });
6827
6828 // Update the remote worktree. Check that it becomes consistent with the
6829 // local worktree.
6830 remote.update(cx, |remote, cx| {
6831 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6832 &initial_snapshot,
6833 1,
6834 1,
6835 true,
6836 );
6837 remote
6838 .as_remote_mut()
6839 .unwrap()
6840 .snapshot
6841 .apply_remote_update(update_message)
6842 .unwrap();
6843
6844 assert_eq!(
6845 remote
6846 .paths()
6847 .map(|p| p.to_str().unwrap())
6848 .collect::<Vec<_>>(),
6849 expected_paths
6850 );
6851 });
6852 }
6853
6854 #[gpui::test]
6855 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6856 let fs = FakeFs::new(cx.background());
6857 fs.insert_tree(
6858 "/dir",
6859 json!({
6860 "a.txt": "a-contents",
6861 "b.txt": "b-contents",
6862 }),
6863 )
6864 .await;
6865
6866 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6867
6868 // Spawn multiple tasks to open paths, repeating some paths.
6869 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6870 (
6871 p.open_local_buffer("/dir/a.txt", cx),
6872 p.open_local_buffer("/dir/b.txt", cx),
6873 p.open_local_buffer("/dir/a.txt", cx),
6874 )
6875 });
6876
6877 let buffer_a_1 = buffer_a_1.await.unwrap();
6878 let buffer_a_2 = buffer_a_2.await.unwrap();
6879 let buffer_b = buffer_b.await.unwrap();
6880 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6881 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6882
6883 // There is only one buffer per path.
6884 let buffer_a_id = buffer_a_1.id();
6885 assert_eq!(buffer_a_2.id(), buffer_a_id);
6886
6887 // Open the same path again while it is still open.
6888 drop(buffer_a_1);
6889 let buffer_a_3 = project
6890 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6891 .await
6892 .unwrap();
6893
6894 // There's still only one buffer per path.
6895 assert_eq!(buffer_a_3.id(), buffer_a_id);
6896 }
6897
    // Exercises the buffer dirty-state machine and the events it emits:
    // edit -> Dirtied, save -> Saved, on-disk deletion -> Dirtied (unless the
    // buffer was already dirty) + FileHandleChanged.
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        // Collects all non-Operation buffer events for assertions below.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a successful save without going through the file system.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            // Note: only the first post-save edit emits Dirtied; the second
            // emits Edited alone since the buffer is already dirty.
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
7031
    // On-disk changes: a clean buffer is reloaded (with anchors translated
    // through the diff); a dirty buffer is left alone and flagged as conflicted.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        // Place an anchor at column 1 of each of the first three rows so we can
        // check how they move when the file is reloaded.
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // The anchors track the surviving lines: "aaa" moved to row 1,
            // "bbbbb" to row 3, and the deleted "c" row collapses to (4, 0).
            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
7107
    // LSP diagnostics whose `related_information` points back and forth between
    // a primary diagnostic and its hints must be grouped: each group gets one
    // id, the primary entry is flagged, and `diagnostic_group` returns all
    // members of a group.
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Two groups: "error 1" with one hint, and "error 2" with two hints.
        // Hints reference their primary via related_information, and vice versa.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // Entries come back ordered by range; group 0 is "error 1" + its hint,
        // group 1 is "error 2" + its two hints, with is_primary set accordingly.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying by group id returns exactly the members of that group.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
7358
    // Drives the two-phase LSP rename flow: `prepareRename` yields the range
    // of the symbol under the cursor, then `rename` returns a workspace edit
    // spanning two files, which becomes a two-buffer project transaction.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        // Advertise rename support with prepare_provider so prepare_rename is used.
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Phase 1: prepare_rename at offset 7 (inside "ONE") returns the
        // symbol's range, 6..9.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Phase 2: perform the rename to "THREE"; the server's workspace edit
        // touches both one.rs (the definition) and two.rs (two references).
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The transaction covers both buffers, each with the rename applied.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
7497
    // Project-wide text search: hits are reported per file, and searching
    // again after an in-memory (unsaved) edit reflects the buffer's current
    // contents rather than what is on disk.
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        // Edit four.rs in memory (without saving) so it now contains "TWO".
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        // The new matches in the dirty buffer are found.
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        // Runs the query and flattens the results into a map from
        // worktree-relative path to the matching offset ranges in that buffer.
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
7568}