1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
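/// A model that may be backed by an entry in one of the project's worktrees.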
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
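/// The top-level model for a workspace's file state: the set of worktrees, the
/// buffers opened from them, the language servers started for those buffers, and
/// the collaborators present when the project is shared or joined remotely.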
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
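/// How the project holds on to an open buffer. Shared and remote projects keep
/// strong handles so buffers stay alive for collaborators; otherwise weak handles
/// let a buffer drop once nothing else references it. `Loading` accumulates
/// operations that arrive for a buffer whose contents are still being loaded.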
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
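/// Worktrees follow the same strong/weak pattern as `OpenBuffer`: they are held
/// strongly while the project is shared, and non-visible worktrees are downgraded
/// to weak handles when sharing stops.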
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
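/// Whether this project is the local, host-side copy or a remote replica joined
/// over RPC. Local projects track their server-assigned id and whether they are
/// currently shared; remote projects track their replica id and whether the host
/// has stopped sharing.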
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
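/// Per-language-server state surfaced to the UI: the server's name, its in-flight
/// progress tokens, and how many disk-based diagnostic updates are still pending.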
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
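/// The edits produced by an operation that may touch several buffers at once, such
/// as applying an LSP workspace edit, keyed by the buffer each transaction applies to.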
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
224#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
225pub struct ProjectEntryId(usize);
226
227impl ProjectEntryId {
228 pub fn new(counter: &AtomicUsize) -> Self {
229 Self(counter.fetch_add(1, SeqCst))
230 }
231
232 pub fn from_proto(id: u64) -> Self {
233 Self(id as usize)
234 }
235
236 pub fn to_proto(&self) -> u64 {
237 self.0 as u64
238 }
239
240 pub fn to_usize(&self) -> usize {
241 self.0
242 }
243}
244
245impl Project {
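    /// Registers the RPC message and request handlers through which a shared
    /// project is driven by collaborators.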
246 pub fn init(client: &Arc<Client>) {
247 client.add_model_message_handler(Self::handle_add_collaborator);
248 client.add_model_message_handler(Self::handle_buffer_reloaded);
249 client.add_model_message_handler(Self::handle_buffer_saved);
250 client.add_model_message_handler(Self::handle_start_language_server);
251 client.add_model_message_handler(Self::handle_update_language_server);
252 client.add_model_message_handler(Self::handle_remove_collaborator);
253 client.add_model_message_handler(Self::handle_register_worktree);
254 client.add_model_message_handler(Self::handle_unregister_worktree);
255 client.add_model_message_handler(Self::handle_unshare_project);
256 client.add_model_message_handler(Self::handle_update_buffer_file);
257 client.add_model_message_handler(Self::handle_update_buffer);
258 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
259 client.add_model_message_handler(Self::handle_update_worktree);
260 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
261 client.add_model_request_handler(Self::handle_apply_code_action);
262 client.add_model_request_handler(Self::handle_reload_buffers);
263 client.add_model_request_handler(Self::handle_format_buffers);
264 client.add_model_request_handler(Self::handle_get_code_actions);
265 client.add_model_request_handler(Self::handle_get_completions);
266 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
269 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
271 client.add_model_request_handler(Self::handle_search_project);
272 client.add_model_request_handler(Self::handle_get_project_symbols);
273 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
274 client.add_model_request_handler(Self::handle_open_buffer_by_id);
275 client.add_model_request_handler(Self::handle_open_buffer_by_path);
276 client.add_model_request_handler(Self::handle_save_buffer);
277 }
278
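    /// Creates a project backed by the local filesystem. The project re-registers
    /// itself with the server each time the client (re)connects, so that it can
    /// later be shared.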
279 pub fn local(
280 client: Arc<Client>,
281 user_store: ModelHandle<UserStore>,
282 languages: Arc<LanguageRegistry>,
283 fs: Arc<dyn Fs>,
284 cx: &mut MutableAppContext,
285 ) -> ModelHandle<Self> {
286 cx.add_model(|cx: &mut ModelContext<Self>| {
287 let (remote_id_tx, remote_id_rx) = watch::channel();
288 let _maintain_remote_id_task = cx.spawn_weak({
289 let rpc = client.clone();
290 move |this, mut cx| {
291 async move {
292 let mut status = rpc.status();
293 while let Some(status) = status.next().await {
294 if let Some(this) = this.upgrade(&cx) {
295 if status.is_connected() {
296 this.update(&mut cx, |this, cx| this.register(cx)).await?;
297 } else {
298 this.update(&mut cx, |this, cx| this.unregister(cx));
299 }
300 }
301 }
302 Ok(())
303 }
304 .log_err()
305 }
306 });
307
308 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
309 Self {
310 worktrees: Default::default(),
311 collaborators: Default::default(),
312 opened_buffers: Default::default(),
313 shared_buffers: Default::default(),
314 loading_buffers: Default::default(),
315 loading_local_worktrees: Default::default(),
316 buffer_snapshots: Default::default(),
317 client_state: ProjectClientState::Local {
318 is_shared: false,
319 remote_id_tx,
320 remote_id_rx,
321 _maintain_remote_id_task,
322 },
323 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
324 subscriptions: Vec::new(),
325 active_entry: None,
326 languages,
327 client,
328 user_store,
329 fs,
330 next_entry_id: Default::default(),
331 language_servers: Default::default(),
332 started_language_servers: Default::default(),
333 language_server_statuses: Default::default(),
334 last_workspace_edits_by_language_server: Default::default(),
335 language_server_settings: Default::default(),
336 next_language_server_id: 0,
337 nonce: StdRng::from_entropy().gen(),
338 }
339 })
340 }
341
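    /// Joins a project hosted by another user: connects, requests `JoinProject`,
    /// builds remote worktrees from the response, and loads the collaborators'
    /// user records.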
342 pub async fn remote(
343 remote_id: u64,
344 client: Arc<Client>,
345 user_store: ModelHandle<UserStore>,
346 languages: Arc<LanguageRegistry>,
347 fs: Arc<dyn Fs>,
348 cx: &mut AsyncAppContext,
349 ) -> Result<ModelHandle<Self>> {
350 client.authenticate_and_connect(true, &cx).await?;
351
352 let response = client
353 .request(proto::JoinProject {
354 project_id: remote_id,
355 })
356 .await?;
357
358 let replica_id = response.replica_id as ReplicaId;
359
360 let mut worktrees = Vec::new();
361 for worktree in response.worktrees {
362 let (worktree, load_task) = cx
363 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
364 worktrees.push(worktree);
365 load_task.detach();
366 }
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
370 let mut this = Self {
371 worktrees: Vec::new(),
372 loading_buffers: Default::default(),
373 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
374 shared_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 active_entry: None,
377 collaborators: Default::default(),
378 languages,
379 user_store: user_store.clone(),
380 fs,
381 next_entry_id: Default::default(),
382 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
383 client: client.clone(),
384 client_state: ProjectClientState::Remote {
385 sharing_has_stopped: false,
386 remote_id,
387 replica_id,
388 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
389 async move {
390 let mut status = client.status();
391 let is_connected =
392 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any later change in status means we were momentarily disconnected.
394 if !is_connected || status.next().await.is_some() {
395 if let Some(this) = this.upgrade(&cx) {
396 this.update(&mut cx, |this, cx| this.project_unshared(cx))
397 }
398 }
399 Ok(())
400 }
401 .log_err()
402 }),
403 },
404 language_servers: Default::default(),
405 started_language_servers: Default::default(),
406 language_server_settings: Default::default(),
407 language_server_statuses: response
408 .language_servers
409 .into_iter()
410 .map(|server| {
411 (
412 server.id as usize,
413 LanguageServerStatus {
414 name: server.name,
415 pending_work: Default::default(),
416 pending_diagnostic_updates: 0,
417 },
418 )
419 })
420 .collect(),
421 last_workspace_edits_by_language_server: Default::default(),
422 next_language_server_id: 0,
423 opened_buffers: Default::default(),
424 buffer_snapshots: Default::default(),
425 nonce: StdRng::from_entropy().gen(),
426 };
427 for worktree in worktrees {
428 this.add_worktree(&worktree, cx);
429 }
430 this
431 });
432
433 let user_ids = response
434 .collaborators
435 .iter()
436 .map(|peer| peer.user_id)
437 .collect();
438 user_store
439 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
440 .await?;
441 let mut collaborators = HashMap::default();
442 for message in response.collaborators {
443 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
444 collaborators.insert(collaborator.peer_id, collaborator);
445 }
446
447 this.update(cx, |this, _| {
448 this.collaborators = collaborators;
449 });
450
451 Ok(this)
452 }
453
454 #[cfg(any(test, feature = "test-support"))]
455 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
456 let languages = Arc::new(LanguageRegistry::test());
457 let http_client = client::test::FakeHttpClient::with_404_response();
458 let client = client::Client::new(http_client.clone());
459 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
460 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
461 }
462
463 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
464 self.opened_buffers
465 .get(&remote_id)
466 .and_then(|buffer| buffer.upgrade(cx))
467 }
468
469 pub fn languages(&self) -> &Arc<LanguageRegistry> {
470 &self.languages
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn check_invariants(&self, cx: &AppContext) {
475 if self.is_local() {
476 let mut worktree_root_paths = HashMap::default();
477 for worktree in self.worktrees(cx) {
478 let worktree = worktree.read(cx);
479 let abs_path = worktree.as_local().unwrap().abs_path().clone();
480 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
481 assert_eq!(
482 prev_worktree_id,
483 None,
484 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
485 abs_path,
486 worktree.id(),
487 prev_worktree_id
488 )
489 }
490 } else {
491 let replica_id = self.replica_id();
492 for buffer in self.opened_buffers.values() {
493 if let Some(buffer) = buffer.upgrade(cx) {
494 let buffer = buffer.read(cx);
495 assert_eq!(
496 buffer.deferred_ops_len(),
497 0,
498 "replica {}, buffer {} has deferred operations",
499 replica_id,
500 buffer.remote_id()
501 );
502 }
503 }
504 }
505 }
506
507 #[cfg(any(test, feature = "test-support"))]
508 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
509 let path = path.into();
510 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
511 self.opened_buffers.iter().any(|(_, buffer)| {
512 if let Some(buffer) = buffer.upgrade(cx) {
513 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
514 if file.worktree == worktree && file.path() == &path.path {
515 return true;
516 }
517 }
518 }
519 false
520 })
521 } else {
522 false
523 }
524 }
525
526 pub fn fs(&self) -> &Arc<dyn Fs> {
527 &self.fs
528 }
529
530 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
531 self.unshare(cx);
532 for worktree in &self.worktrees {
533 if let Some(worktree) = worktree.upgrade(cx) {
534 worktree.update(cx, |worktree, _| {
535 worktree.as_local_mut().unwrap().unregister();
536 });
537 }
538 }
539
540 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
541 *remote_id_tx.borrow_mut() = None;
542 }
543
544 self.subscriptions.clear();
545 }
546
547 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
548 self.unregister(cx);
549
550 let response = self.client.request(proto::RegisterProject {});
551 cx.spawn(|this, mut cx| async move {
552 let remote_id = response.await?.project_id;
553
554 let mut registrations = Vec::new();
555 this.update(&mut cx, |this, cx| {
556 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
557 *remote_id_tx.borrow_mut() = Some(remote_id);
558 }
559
560 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
561
562 this.subscriptions
563 .push(this.client.add_model_for_remote_entity(remote_id, cx));
564
565 for worktree in &this.worktrees {
566 if let Some(worktree) = worktree.upgrade(cx) {
567 registrations.push(worktree.update(cx, |worktree, cx| {
568 let worktree = worktree.as_local_mut().unwrap();
569 worktree.register(remote_id, cx)
570 }));
571 }
572 }
573 });
574
575 futures::future::try_join_all(registrations).await?;
576 Ok(())
577 })
578 }
579
580 pub fn remote_id(&self) -> Option<u64> {
581 match &self.client_state {
582 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
583 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
584 }
585 }
586
587 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
588 let mut id = None;
589 let mut watch = None;
590 match &self.client_state {
591 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
592 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
593 }
594
595 async move {
596 if let Some(id) = id {
597 return id;
598 }
599 let mut watch = watch.unwrap();
600 loop {
601 let id = *watch.borrow();
602 if let Some(id) = id {
603 return id;
604 }
605 watch.next().await;
606 }
607 }
608 }
609
610 pub fn replica_id(&self) -> ReplicaId {
611 match &self.client_state {
612 ProjectClientState::Local { .. } => 0,
613 ProjectClientState::Remote { replica_id, .. } => *replica_id,
614 }
615 }
616
617 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
618 &self.collaborators
619 }
620
621 pub fn worktrees<'a>(
622 &'a self,
623 cx: &'a AppContext,
624 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
625 self.worktrees
626 .iter()
627 .filter_map(move |worktree| worktree.upgrade(cx))
628 }
629
630 pub fn visible_worktrees<'a>(
631 &'a self,
632 cx: &'a AppContext,
633 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
634 self.worktrees.iter().filter_map(|worktree| {
635 worktree.upgrade(cx).and_then(|worktree| {
636 if worktree.read(cx).is_visible() {
637 Some(worktree)
638 } else {
639 None
640 }
641 })
642 })
643 }
644
645 pub fn worktree_for_id(
646 &self,
647 id: WorktreeId,
648 cx: &AppContext,
649 ) -> Option<ModelHandle<Worktree>> {
650 self.worktrees(cx)
651 .find(|worktree| worktree.read(cx).id() == id)
652 }
653
654 pub fn worktree_for_entry(
655 &self,
656 entry_id: ProjectEntryId,
657 cx: &AppContext,
658 ) -> Option<ModelHandle<Worktree>> {
659 self.worktrees(cx)
660 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
661 }
662
663 pub fn worktree_id_for_entry(
664 &self,
665 entry_id: ProjectEntryId,
666 cx: &AppContext,
667 ) -> Option<WorktreeId> {
668 self.worktree_for_entry(entry_id, cx)
669 .map(|worktree| worktree.read(cx).id())
670 }
671
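    /// Shares this local project with collaborators: upgrades buffer and worktree
    /// handles to strong ones, tells the server the project is shared, and then
    /// shares each worktree.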
672 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
673 let rpc = self.client.clone();
674 cx.spawn(|this, mut cx| async move {
675 let project_id = this.update(&mut cx, |this, cx| {
676 if let ProjectClientState::Local {
677 is_shared,
678 remote_id_rx,
679 ..
680 } = &mut this.client_state
681 {
682 *is_shared = true;
683
684 for open_buffer in this.opened_buffers.values_mut() {
685 match open_buffer {
686 OpenBuffer::Strong(_) => {}
687 OpenBuffer::Weak(buffer) => {
688 if let Some(buffer) = buffer.upgrade(cx) {
689 *open_buffer = OpenBuffer::Strong(buffer);
690 }
691 }
692 OpenBuffer::Loading(_) => unreachable!(),
693 }
694 }
695
696 for worktree_handle in this.worktrees.iter_mut() {
697 match worktree_handle {
698 WorktreeHandle::Strong(_) => {}
699 WorktreeHandle::Weak(worktree) => {
700 if let Some(worktree) = worktree.upgrade(cx) {
701 *worktree_handle = WorktreeHandle::Strong(worktree);
702 }
703 }
704 }
705 }
706
707 remote_id_rx
708 .borrow()
709 .ok_or_else(|| anyhow!("no project id"))
710 } else {
711 Err(anyhow!("can't share a remote project"))
712 }
713 })?;
714
715 rpc.request(proto::ShareProject { project_id }).await?;
716
717 let mut tasks = Vec::new();
718 this.update(&mut cx, |this, cx| {
719 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
720 worktree.update(cx, |worktree, cx| {
721 let worktree = worktree.as_local_mut().unwrap();
722 tasks.push(worktree.share(project_id, cx));
723 });
724 }
725 });
726 for task in tasks {
727 task.await?;
728 }
729 this.update(&mut cx, |_, cx| cx.notify());
730 Ok(())
731 })
732 }
733
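    /// Stops sharing this local project: clears collaborators and shared buffers,
    /// downgrades non-visible worktrees and all buffers to weak handles, and
    /// notifies the server.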
734 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
735 let rpc = self.client.clone();
736
737 if let ProjectClientState::Local {
738 is_shared,
739 remote_id_rx,
740 ..
741 } = &mut self.client_state
742 {
743 if !*is_shared {
744 return;
745 }
746
747 *is_shared = false;
748 self.collaborators.clear();
749 self.shared_buffers.clear();
750 for worktree_handle in self.worktrees.iter_mut() {
751 if let WorktreeHandle::Strong(worktree) = worktree_handle {
752 let is_visible = worktree.update(cx, |worktree, _| {
753 worktree.as_local_mut().unwrap().unshare();
754 worktree.is_visible()
755 });
756 if !is_visible {
757 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
758 }
759 }
760 }
761
            for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }
770
771 if let Some(project_id) = *remote_id_rx.borrow() {
772 rpc.send(proto::UnshareProject { project_id }).log_err();
773 }
774
775 cx.notify();
776 } else {
777 log::error!("attempted to unshare a remote project");
778 }
779 }
780
781 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
782 if let ProjectClientState::Remote {
783 sharing_has_stopped,
784 ..
785 } = &mut self.client_state
786 {
787 *sharing_has_stopped = true;
788 self.collaborators.clear();
789 cx.notify();
790 }
791 }
792
793 pub fn is_read_only(&self) -> bool {
794 match &self.client_state {
795 ProjectClientState::Local { .. } => false,
796 ProjectClientState::Remote {
797 sharing_has_stopped,
798 ..
799 } => *sharing_has_stopped,
800 }
801 }
802
803 pub fn is_local(&self) -> bool {
804 match &self.client_state {
805 ProjectClientState::Local { .. } => true,
806 ProjectClientState::Remote { .. } => false,
807 }
808 }
809
810 pub fn is_remote(&self) -> bool {
811 !self.is_local()
812 }
813
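    /// Creates an in-memory buffer that is not backed by a file. Only supported on
    /// local projects.
    ///
    /// Illustrative sketch (assumes a `ModelHandle<Project>` named `project` and a
    /// `&mut MutableAppContext` named `cx`):
    /// ```ignore
    /// let scratch = project.update(cx, |project, cx| project.create_buffer("", None, cx))?;
    /// ```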
814 pub fn create_buffer(
815 &mut self,
816 text: &str,
817 language: Option<Arc<Language>>,
818 cx: &mut ModelContext<Self>,
819 ) -> Result<ModelHandle<Buffer>> {
820 if self.is_remote() {
821 return Err(anyhow!("creating buffers as a guest is not supported yet"));
822 }
823
824 let buffer = cx.add_model(|cx| {
825 Buffer::new(self.replica_id(), text, cx)
826 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
827 });
828 self.register_buffer(&buffer, cx)?;
829 Ok(buffer)
830 }
831
832 pub fn open_path(
833 &mut self,
834 path: impl Into<ProjectPath>,
835 cx: &mut ModelContext<Self>,
836 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
837 let task = self.open_buffer(path, cx);
838 cx.spawn_weak(|_, cx| async move {
839 let buffer = task.await?;
840 let project_entry_id = buffer
841 .read_with(&cx, |buffer, cx| {
842 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
843 })
844 .ok_or_else(|| anyhow!("no project entry"))?;
845 Ok((project_entry_id, buffer.into()))
846 })
847 }
848
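    /// Opens the buffer for a path in one of the project's worktrees. If the buffer
    /// is already open it is returned directly; if a load for the same path is in
    /// flight, the caller waits on that load instead of starting another; otherwise
    /// the buffer is loaded from disk (local worktrees) or requested from the host
    /// (remote worktrees).
    ///
    /// Illustrative sketch, assuming a `ModelHandle<Project>` named `project`, an
    /// existing `worktree_id`, and an async context:
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```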
849 pub fn open_buffer(
850 &mut self,
851 path: impl Into<ProjectPath>,
852 cx: &mut ModelContext<Self>,
853 ) -> Task<Result<ModelHandle<Buffer>>> {
854 let project_path = path.into();
855 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
856 worktree
857 } else {
858 return Task::ready(Err(anyhow!("no such worktree")));
859 };
860
861 // If there is already a buffer for the given path, then return it.
862 let existing_buffer = self.get_open_buffer(&project_path, cx);
863 if let Some(existing_buffer) = existing_buffer {
864 return Task::ready(Ok(existing_buffer));
865 }
866
867 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
868 // If the given path is already being loaded, then wait for that existing
869 // task to complete and return the same buffer.
870 hash_map::Entry::Occupied(e) => e.get().clone(),
871
872 // Otherwise, record the fact that this path is now being loaded.
873 hash_map::Entry::Vacant(entry) => {
874 let (mut tx, rx) = postage::watch::channel();
875 entry.insert(rx.clone());
876
877 let load_buffer = if worktree.read(cx).is_local() {
878 self.open_local_buffer(&project_path.path, &worktree, cx)
879 } else {
880 self.open_remote_buffer(&project_path.path, &worktree, cx)
881 };
882
883 cx.spawn(move |this, mut cx| async move {
884 let load_result = load_buffer.await;
885 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
886 // Record the fact that the buffer is no longer loading.
887 this.loading_buffers.remove(&project_path);
888 let buffer = load_result.map_err(Arc::new)?;
889 Ok(buffer)
890 }));
891 })
892 .detach();
893 rx
894 }
895 };
896
897 cx.foreground().spawn(async move {
898 loop {
899 if let Some(result) = loading_watch.borrow().as_ref() {
900 match result {
901 Ok(buffer) => return Ok(buffer.clone()),
902 Err(error) => return Err(anyhow!("{}", error)),
903 }
904 }
905 loading_watch.next().await;
906 }
907 })
908 }
909
910 fn open_local_buffer(
911 &mut self,
912 path: &Arc<Path>,
913 worktree: &ModelHandle<Worktree>,
914 cx: &mut ModelContext<Self>,
915 ) -> Task<Result<ModelHandle<Buffer>>> {
916 let load_buffer = worktree.update(cx, |worktree, cx| {
917 let worktree = worktree.as_local_mut().unwrap();
918 worktree.load_buffer(path, cx)
919 });
920 cx.spawn(|this, mut cx| async move {
921 let buffer = load_buffer.await?;
922 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
923 Ok(buffer)
924 })
925 }
926
927 fn open_remote_buffer(
928 &mut self,
929 path: &Arc<Path>,
930 worktree: &ModelHandle<Worktree>,
931 cx: &mut ModelContext<Self>,
932 ) -> Task<Result<ModelHandle<Buffer>>> {
933 let rpc = self.client.clone();
934 let project_id = self.remote_id().unwrap();
935 let remote_worktree_id = worktree.read(cx).id();
936 let path = path.clone();
937 let path_string = path.to_string_lossy().to_string();
938 cx.spawn(|this, mut cx| async move {
939 let response = rpc
940 .request(proto::OpenBufferByPath {
941 project_id,
942 worktree_id: remote_worktree_id.to_proto(),
943 path: path_string,
944 })
945 .await?;
946 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
947 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
948 .await
949 })
950 }
951
952 fn open_local_buffer_via_lsp(
953 &mut self,
954 abs_path: lsp::Url,
955 lsp_adapter: Arc<dyn LspAdapter>,
956 lsp_server: Arc<LanguageServer>,
957 cx: &mut ModelContext<Self>,
958 ) -> Task<Result<ModelHandle<Buffer>>> {
959 cx.spawn(|this, mut cx| async move {
960 let abs_path = abs_path
961 .to_file_path()
962 .map_err(|_| anyhow!("can't convert URI to path"))?;
963 let (worktree, relative_path) = if let Some(result) =
964 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
965 {
966 result
967 } else {
968 let worktree = this
969 .update(&mut cx, |this, cx| {
970 this.create_local_worktree(&abs_path, false, cx)
971 })
972 .await?;
973 this.update(&mut cx, |this, cx| {
974 this.language_servers.insert(
975 (worktree.read(cx).id(), lsp_adapter.name()),
976 (lsp_adapter, lsp_server),
977 );
978 });
979 (worktree, PathBuf::new())
980 };
981
982 let project_path = ProjectPath {
983 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
984 path: relative_path.into(),
985 };
986 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
987 .await
988 })
989 }
990
991 pub fn open_buffer_by_id(
992 &mut self,
993 id: u64,
994 cx: &mut ModelContext<Self>,
995 ) -> Task<Result<ModelHandle<Buffer>>> {
996 if let Some(buffer) = self.buffer_for_id(id, cx) {
997 Task::ready(Ok(buffer))
998 } else if self.is_local() {
999 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1000 } else if let Some(project_id) = self.remote_id() {
1001 let request = self
1002 .client
1003 .request(proto::OpenBufferById { project_id, id });
1004 cx.spawn(|this, mut cx| async move {
1005 let buffer = request
1006 .await?
1007 .buffer
1008 .ok_or_else(|| anyhow!("invalid buffer"))?;
1009 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1010 .await
1011 })
1012 } else {
1013 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1014 }
1015 }
1016
1017 pub fn save_buffer_as(
1018 &mut self,
1019 buffer: ModelHandle<Buffer>,
1020 abs_path: PathBuf,
1021 cx: &mut ModelContext<Project>,
1022 ) -> Task<Result<()>> {
1023 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1024 let old_path =
1025 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1026 cx.spawn(|this, mut cx| async move {
1027 if let Some(old_path) = old_path {
1028 this.update(&mut cx, |this, cx| {
1029 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1030 });
1031 }
1032 let (worktree, path) = worktree_task.await?;
1033 worktree
1034 .update(&mut cx, |worktree, cx| {
1035 worktree
1036 .as_local_mut()
1037 .unwrap()
1038 .save_buffer_as(buffer.clone(), path, cx)
1039 })
1040 .await?;
1041 this.update(&mut cx, |this, cx| {
1042 this.assign_language_to_buffer(&buffer, cx);
1043 this.register_buffer_with_language_server(&buffer, cx);
1044 });
1045 Ok(())
1046 })
1047 }
1048
1049 pub fn get_open_buffer(
1050 &mut self,
1051 path: &ProjectPath,
1052 cx: &mut ModelContext<Self>,
1053 ) -> Option<ModelHandle<Buffer>> {
1054 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1055 self.opened_buffers.values().find_map(|buffer| {
1056 let buffer = buffer.upgrade(cx)?;
1057 let file = File::from_dyn(buffer.read(cx).file())?;
1058 if file.worktree == worktree && file.path() == &path.path {
1059 Some(buffer)
1060 } else {
1061 None
1062 }
1063 })
1064 }
1065
1066 fn register_buffer(
1067 &mut self,
1068 buffer: &ModelHandle<Buffer>,
1069 cx: &mut ModelContext<Self>,
1070 ) -> Result<()> {
1071 let remote_id = buffer.read(cx).remote_id();
1072 let open_buffer = if self.is_remote() || self.is_shared() {
1073 OpenBuffer::Strong(buffer.clone())
1074 } else {
1075 OpenBuffer::Weak(buffer.downgrade())
1076 };
1077
1078 match self.opened_buffers.insert(remote_id, open_buffer) {
1079 None => {}
1080 Some(OpenBuffer::Loading(operations)) => {
1081 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1082 }
1083 Some(OpenBuffer::Weak(existing_handle)) => {
1084 if existing_handle.upgrade(cx).is_some() {
1085 Err(anyhow!(
1086 "already registered buffer with remote id {}",
1087 remote_id
1088 ))?
1089 }
1090 }
1091 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1092 "already registered buffer with remote id {}",
1093 remote_id
1094 ))?,
1095 }
1096 cx.subscribe(buffer, |this, buffer, event, cx| {
1097 this.on_buffer_event(buffer, event, cx);
1098 })
1099 .detach();
1100
1101 self.assign_language_to_buffer(buffer, cx);
1102 self.register_buffer_with_language_server(buffer, cx);
1103 cx.observe_release(buffer, |this, buffer, cx| {
1104 if let Some(file) = File::from_dyn(buffer.file()) {
1105 if file.is_local() {
1106 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1107 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1108 server
1109 .notify::<lsp::notification::DidCloseTextDocument>(
1110 lsp::DidCloseTextDocumentParams {
1111 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1112 },
1113 )
1114 .log_err();
1115 }
1116 }
1117 }
1118 })
1119 .detach();
1120
1121 Ok(())
1122 }
1123
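    /// Introduces a newly opened local buffer to its language server: sends
    /// `textDocument/didOpen`, seeds completion trigger characters from the server's
    /// capabilities, applies any diagnostics already recorded for the path, and
    /// stores the initial snapshot (version 0) used to diff later `didChange`
    /// notifications.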
1124 fn register_buffer_with_language_server(
1125 &mut self,
1126 buffer_handle: &ModelHandle<Buffer>,
1127 cx: &mut ModelContext<Self>,
1128 ) {
1129 let buffer = buffer_handle.read(cx);
1130 let buffer_id = buffer.remote_id();
1131 if let Some(file) = File::from_dyn(buffer.file()) {
1132 if file.is_local() {
1133 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1134 let initial_snapshot = buffer.text_snapshot();
1135 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1136
1137 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1138 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1139 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1140 .log_err();
1141 }
1142 }
1143
1144 if let Some((_, server)) = language_server {
1145 server
1146 .notify::<lsp::notification::DidOpenTextDocument>(
1147 lsp::DidOpenTextDocumentParams {
1148 text_document: lsp::TextDocumentItem::new(
1149 uri,
1150 Default::default(),
1151 0,
1152 initial_snapshot.text(),
1153 ),
1154 }
1155 .clone(),
1156 )
1157 .log_err();
1158 buffer_handle.update(cx, |buffer, cx| {
1159 buffer.set_completion_triggers(
1160 server
1161 .capabilities()
1162 .completion_provider
1163 .as_ref()
1164 .and_then(|provider| provider.trigger_characters.clone())
1165 .unwrap_or(Vec::new()),
1166 cx,
1167 )
1168 });
1169 self.buffer_snapshots
1170 .insert(buffer_id, vec![(0, initial_snapshot)]);
1171 }
1172 }
1173 }
1174 }
1175
1176 fn unregister_buffer_from_language_server(
1177 &mut self,
1178 buffer: &ModelHandle<Buffer>,
1179 old_path: PathBuf,
1180 cx: &mut ModelContext<Self>,
1181 ) {
1182 buffer.update(cx, |buffer, cx| {
1183 buffer.update_diagnostics(Default::default(), cx);
1184 self.buffer_snapshots.remove(&buffer.remote_id());
1185 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1186 language_server
1187 .notify::<lsp::notification::DidCloseTextDocument>(
1188 lsp::DidCloseTextDocumentParams {
1189 text_document: lsp::TextDocumentIdentifier::new(
1190 lsp::Url::from_file_path(old_path).unwrap(),
1191 ),
1192 },
1193 )
1194 .log_err();
1195 }
1196 });
1197 }
1198
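    /// Reacts to buffer events: forwards edit operations over RPC when the project
    /// has a remote id, translates local edits into `textDocument/didChange`
    /// notifications (diffed against the last snapshot sent to the language server),
    /// and sends `textDocument/didSave` after saves.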
1199 fn on_buffer_event(
1200 &mut self,
1201 buffer: ModelHandle<Buffer>,
1202 event: &BufferEvent,
1203 cx: &mut ModelContext<Self>,
1204 ) -> Option<()> {
1205 match event {
1206 BufferEvent::Operation(operation) => {
1207 let project_id = self.remote_id()?;
1208 let request = self.client.request(proto::UpdateBuffer {
1209 project_id,
1210 buffer_id: buffer.read(cx).remote_id(),
1211 operations: vec![language::proto::serialize_operation(&operation)],
1212 });
1213 cx.background().spawn(request).detach_and_log_err(cx);
1214 }
1215 BufferEvent::Edited { .. } => {
1216 let (_, language_server) = self
1217 .language_server_for_buffer(buffer.read(cx), cx)?
1218 .clone();
1219 let buffer = buffer.read(cx);
1220 let file = File::from_dyn(buffer.file())?;
1221 let abs_path = file.as_local()?.abs_path(cx);
1222 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1223 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1224 let (version, prev_snapshot) = buffer_snapshots.last()?;
1225 let next_snapshot = buffer.text_snapshot();
1226 let next_version = version + 1;
1227
1228 let content_changes = buffer
1229 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1230 .map(|edit| {
1231 let edit_start = edit.new.start.0;
1232 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1233 let new_text = next_snapshot
1234 .text_for_range(edit.new.start.1..edit.new.end.1)
1235 .collect();
1236 lsp::TextDocumentContentChangeEvent {
1237 range: Some(lsp::Range::new(
1238 point_to_lsp(edit_start),
1239 point_to_lsp(edit_end),
1240 )),
1241 range_length: None,
1242 text: new_text,
1243 }
1244 })
1245 .collect();
1246
1247 buffer_snapshots.push((next_version, next_snapshot));
1248
1249 language_server
1250 .notify::<lsp::notification::DidChangeTextDocument>(
1251 lsp::DidChangeTextDocumentParams {
1252 text_document: lsp::VersionedTextDocumentIdentifier::new(
1253 uri,
1254 next_version,
1255 ),
1256 content_changes,
1257 },
1258 )
1259 .log_err();
1260 }
1261 BufferEvent::Saved => {
1262 let file = File::from_dyn(buffer.read(cx).file())?;
1263 let worktree_id = file.worktree_id(cx);
1264 let abs_path = file.as_local()?.abs_path(cx);
1265 let text_document = lsp::TextDocumentIdentifier {
1266 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1267 };
1268
1269 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1270 server
1271 .notify::<lsp::notification::DidSaveTextDocument>(
1272 lsp::DidSaveTextDocumentParams {
1273 text_document: text_document.clone(),
1274 text: None,
1275 },
1276 )
1277 .log_err();
1278 }
1279 }
1280 _ => {}
1281 }
1282
1283 None
1284 }
1285
1286 fn language_servers_for_worktree(
1287 &self,
1288 worktree_id: WorktreeId,
1289 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1290 self.language_servers.iter().filter_map(
1291 move |((language_server_worktree_id, _), server)| {
1292 if *language_server_worktree_id == worktree_id {
1293 Some(server)
1294 } else {
1295 None
1296 }
1297 },
1298 )
1299 }
1300
1301 fn assign_language_to_buffer(
1302 &mut self,
1303 buffer: &ModelHandle<Buffer>,
1304 cx: &mut ModelContext<Self>,
1305 ) -> Option<()> {
1306 // If the buffer has a language, set it and start the language server if we haven't already.
1307 let full_path = buffer.read(cx).file()?.full_path(cx);
1308 let language = self.languages.select_language(&full_path)?;
1309 buffer.update(cx, |buffer, cx| {
1310 buffer.set_language(Some(language.clone()), cx);
1311 });
1312
1313 let file = File::from_dyn(buffer.read(cx).file())?;
1314 let worktree = file.worktree.read(cx).as_local()?;
1315 let worktree_id = worktree.id();
1316 let worktree_abs_path = worktree.abs_path().clone();
1317 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1318
1319 None
1320 }
1321
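    /// Starts the language server for a (worktree, adapter) pair at most once,
    /// wiring up handlers for published diagnostics, progress, configuration
    /// requests, and workspace edits, and then opening every already-open buffer of
    /// the matching language on the new server.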
1322 fn start_language_server(
1323 &mut self,
1324 worktree_id: WorktreeId,
1325 worktree_path: Arc<Path>,
1326 language: Arc<Language>,
1327 cx: &mut ModelContext<Self>,
1328 ) {
1329 let adapter = if let Some(adapter) = language.lsp_adapter() {
1330 adapter
1331 } else {
1332 return;
1333 };
1334 let key = (worktree_id, adapter.name());
1335 self.started_language_servers
1336 .entry(key.clone())
1337 .or_insert_with(|| {
1338 let server_id = post_inc(&mut self.next_language_server_id);
1339 let language_server = self.languages.start_language_server(
1340 server_id,
1341 language.clone(),
1342 worktree_path,
1343 self.client.http_client(),
1344 cx,
1345 );
1346 cx.spawn_weak(|this, mut cx| async move {
1347 let language_server = language_server?.await.log_err()?;
1348 let language_server = language_server
1349 .initialize(adapter.initialization_options())
1350 .await
1351 .log_err()?;
1352 let this = this.upgrade(&cx)?;
1353 let disk_based_diagnostics_progress_token =
1354 adapter.disk_based_diagnostics_progress_token();
1355
1356 language_server
1357 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1358 let this = this.downgrade();
1359 let adapter = adapter.clone();
1360 move |params, mut cx| {
1361 if let Some(this) = this.upgrade(&cx) {
1362 this.update(&mut cx, |this, cx| {
1363 this.on_lsp_diagnostics_published(
1364 server_id,
1365 params,
1366 &adapter,
1367 disk_based_diagnostics_progress_token,
1368 cx,
1369 );
1370 });
1371 }
1372 }
1373 })
1374 .detach();
1375
1376 language_server
1377 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1378 let settings = this
1379 .read_with(&cx, |this, _| this.language_server_settings.clone());
1380 move |params, _| {
1381 let settings = settings.lock().clone();
1382 async move {
1383 Ok(params
1384 .items
1385 .into_iter()
1386 .map(|item| {
1387 if let Some(section) = &item.section {
1388 settings
1389 .get(section)
1390 .cloned()
1391 .unwrap_or(serde_json::Value::Null)
1392 } else {
1393 settings.clone()
1394 }
1395 })
1396 .collect())
1397 }
1398 }
1399 })
1400 .detach();
1401
1402 language_server
1403 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1404 let this = this.downgrade();
1405 let adapter = adapter.clone();
1406 let language_server = language_server.clone();
1407 move |params, cx| {
1408 Self::on_lsp_workspace_edit(
1409 this,
1410 params,
1411 server_id,
1412 adapter.clone(),
1413 language_server.clone(),
1414 cx,
1415 )
1416 }
1417 })
1418 .detach();
1419
1420 language_server
1421 .on_notification::<lsp::notification::Progress, _>({
1422 let this = this.downgrade();
1423 move |params, mut cx| {
1424 if let Some(this) = this.upgrade(&cx) {
1425 this.update(&mut cx, |this, cx| {
1426 this.on_lsp_progress(
1427 params,
1428 server_id,
1429 disk_based_diagnostics_progress_token,
1430 cx,
1431 );
1432 });
1433 }
1434 }
1435 })
1436 .detach();
1437
1438 this.update(&mut cx, |this, cx| {
1439 this.language_servers
1440 .insert(key.clone(), (adapter, language_server.clone()));
1441 this.language_server_statuses.insert(
1442 server_id,
1443 LanguageServerStatus {
1444 name: language_server.name().to_string(),
1445 pending_work: Default::default(),
1446 pending_diagnostic_updates: 0,
1447 },
1448 );
1449 language_server
1450 .notify::<lsp::notification::DidChangeConfiguration>(
1451 lsp::DidChangeConfigurationParams {
1452 settings: this.language_server_settings.lock().clone(),
1453 },
1454 )
1455 .ok();
1456
1457 if let Some(project_id) = this.remote_id() {
1458 this.client
1459 .send(proto::StartLanguageServer {
1460 project_id,
1461 server: Some(proto::LanguageServer {
1462 id: server_id as u64,
1463 name: language_server.name().to_string(),
1464 }),
1465 })
1466 .log_err();
1467 }
1468
1469 // Tell the language server about every open buffer in the worktree that matches the language.
1470 for buffer in this.opened_buffers.values() {
1471 if let Some(buffer_handle) = buffer.upgrade(cx) {
1472 let buffer = buffer_handle.read(cx);
1473 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1474 file
1475 } else {
1476 continue;
1477 };
1478 let language = if let Some(language) = buffer.language() {
1479 language
1480 } else {
1481 continue;
1482 };
1483 if file.worktree.read(cx).id() != key.0
1484 || language.lsp_adapter().map(|a| a.name())
1485 != Some(key.1.clone())
1486 {
1487 continue;
1488 }
1489
1490 let file = file.as_local()?;
1491 let versions = this
1492 .buffer_snapshots
1493 .entry(buffer.remote_id())
1494 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1495 let (version, initial_snapshot) = versions.last().unwrap();
1496 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1497 language_server
1498 .notify::<lsp::notification::DidOpenTextDocument>(
1499 lsp::DidOpenTextDocumentParams {
1500 text_document: lsp::TextDocumentItem::new(
1501 uri,
1502 Default::default(),
1503 *version,
1504 initial_snapshot.text(),
1505 ),
1506 },
1507 )
1508 .log_err()?;
1509 buffer_handle.update(cx, |buffer, cx| {
1510 buffer.set_completion_triggers(
1511 language_server
1512 .capabilities()
1513 .completion_provider
1514 .as_ref()
1515 .and_then(|provider| {
1516 provider.trigger_characters.clone()
1517 })
1518 .unwrap_or(Vec::new()),
1519 cx,
1520 )
1521 });
1522 }
1523 }
1524
1525 cx.notify();
1526 Some(())
1527 });
1528
1529 Some(language_server)
1530 })
1531 });
1532 }
1533
1534 pub fn restart_language_servers_for_buffers(
1535 &mut self,
1536 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1537 cx: &mut ModelContext<Self>,
1538 ) -> Option<()> {
1539 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1540 .into_iter()
1541 .filter_map(|buffer| {
1542 let file = File::from_dyn(buffer.read(cx).file())?;
1543 let worktree = file.worktree.read(cx).as_local()?;
1544 let worktree_id = worktree.id();
1545 let worktree_abs_path = worktree.abs_path().clone();
1546 let full_path = file.full_path(cx);
1547 Some((worktree_id, worktree_abs_path, full_path))
1548 })
1549 .collect();
1550 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1551 let language = self.languages.select_language(&full_path)?;
1552 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1553 }
1554
1555 None
1556 }
1557
1558 fn restart_language_server(
1559 &mut self,
1560 worktree_id: WorktreeId,
1561 worktree_path: Arc<Path>,
1562 language: Arc<Language>,
1563 cx: &mut ModelContext<Self>,
1564 ) {
1565 let adapter = if let Some(adapter) = language.lsp_adapter() {
1566 adapter
1567 } else {
1568 return;
1569 };
1570 let key = (worktree_id, adapter.name());
1571 let server_to_shutdown = self.language_servers.remove(&key);
1572 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1576 cx.spawn_weak(|this, mut cx| async move {
1577 if let Some(this) = this.upgrade(&cx) {
1578 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1579 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1580 shutdown_task.await;
1581 }
1582 }
1583
1584 this.update(&mut cx, |this, cx| {
1585 this.start_language_server(worktree_id, worktree_path, language, cx);
1586 });
1587 }
1588 })
1589 .detach();
1590 }
1591
1592 fn on_lsp_diagnostics_published(
1593 &mut self,
1594 server_id: usize,
1595 mut params: lsp::PublishDiagnosticsParams,
1596 adapter: &Arc<dyn LspAdapter>,
1597 disk_based_diagnostics_progress_token: Option<&str>,
1598 cx: &mut ModelContext<Self>,
1599 ) {
1600 adapter.process_diagnostics(&mut params);
1601 if disk_based_diagnostics_progress_token.is_none() {
1602 self.disk_based_diagnostics_started(cx);
1603 self.broadcast_language_server_update(
1604 server_id,
1605 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1606 proto::LspDiskBasedDiagnosticsUpdating {},
1607 ),
1608 );
1609 }
1610 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1611 .log_err();
1612 if disk_based_diagnostics_progress_token.is_none() {
1613 self.disk_based_diagnostics_finished(cx);
1614 self.broadcast_language_server_update(
1615 server_id,
1616 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1617 proto::LspDiskBasedDiagnosticsUpdated {},
1618 ),
1619 );
1620 }
1621 }
1622
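    /// Interprets `$/progress` notifications. Work tagged with the adapter's
    /// disk-based-diagnostics token toggles the project-wide "diagnostics updating"
    /// state; all other tokens are tracked as per-server pending work for the UI.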
1623 fn on_lsp_progress(
1624 &mut self,
1625 progress: lsp::ProgressParams,
1626 server_id: usize,
1627 disk_based_diagnostics_progress_token: Option<&str>,
1628 cx: &mut ModelContext<Self>,
1629 ) {
1630 let token = match progress.token {
1631 lsp::NumberOrString::String(token) => token,
1632 lsp::NumberOrString::Number(token) => {
1633 log::info!("skipping numeric progress token {}", token);
1634 return;
1635 }
1636 };
1637 let progress = match progress.value {
1638 lsp::ProgressParamsValue::WorkDone(value) => value,
1639 };
1640 let language_server_status =
1641 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1642 status
1643 } else {
1644 return;
1645 };
1646 match progress {
1647 lsp::WorkDoneProgress::Begin(_) => {
1648 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1649 language_server_status.pending_diagnostic_updates += 1;
1650 if language_server_status.pending_diagnostic_updates == 1 {
1651 self.disk_based_diagnostics_started(cx);
1652 self.broadcast_language_server_update(
1653 server_id,
1654 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1655 proto::LspDiskBasedDiagnosticsUpdating {},
1656 ),
1657 );
1658 }
1659 } else {
1660 self.on_lsp_work_start(server_id, token.clone(), cx);
1661 self.broadcast_language_server_update(
1662 server_id,
1663 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1664 token,
1665 }),
1666 );
1667 }
1668 }
1669 lsp::WorkDoneProgress::Report(report) => {
1670 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1671 self.on_lsp_work_progress(
1672 server_id,
1673 token.clone(),
1674 LanguageServerProgress {
1675 message: report.message.clone(),
1676 percentage: report.percentage.map(|p| p as usize),
1677 last_update_at: Instant::now(),
1678 },
1679 cx,
1680 );
1681 self.broadcast_language_server_update(
1682 server_id,
1683 proto::update_language_server::Variant::WorkProgress(
1684 proto::LspWorkProgress {
1685 token,
1686 message: report.message,
1687 percentage: report.percentage.map(|p| p as u32),
1688 },
1689 ),
1690 );
1691 }
1692 }
1693 lsp::WorkDoneProgress::End(_) => {
1694 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1695 language_server_status.pending_diagnostic_updates -= 1;
1696 if language_server_status.pending_diagnostic_updates == 0 {
1697 self.disk_based_diagnostics_finished(cx);
1698 self.broadcast_language_server_update(
1699 server_id,
1700 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1701 proto::LspDiskBasedDiagnosticsUpdated {},
1702 ),
1703 );
1704 }
1705 } else {
1706 self.on_lsp_work_end(server_id, token.clone(), cx);
1707 self.broadcast_language_server_update(
1708 server_id,
1709 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1710 token,
1711 }),
1712 );
1713 }
1714 }
1715 }
1716 }
1717
1718 fn on_lsp_work_start(
1719 &mut self,
1720 language_server_id: usize,
1721 token: String,
1722 cx: &mut ModelContext<Self>,
1723 ) {
1724 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1725 status.pending_work.insert(
1726 token,
1727 LanguageServerProgress {
1728 message: None,
1729 percentage: None,
1730 last_update_at: Instant::now(),
1731 },
1732 );
1733 cx.notify();
1734 }
1735 }
1736
1737 fn on_lsp_work_progress(
1738 &mut self,
1739 language_server_id: usize,
1740 token: String,
1741 progress: LanguageServerProgress,
1742 cx: &mut ModelContext<Self>,
1743 ) {
1744 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1745 status.pending_work.insert(token, progress);
1746 cx.notify();
1747 }
1748 }
1749
1750 fn on_lsp_work_end(
1751 &mut self,
1752 language_server_id: usize,
1753 token: String,
1754 cx: &mut ModelContext<Self>,
1755 ) {
1756 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1757 status.pending_work.remove(&token);
1758 cx.notify();
1759 }
1760 }
1761
1762 async fn on_lsp_workspace_edit(
1763 this: WeakModelHandle<Self>,
1764 params: lsp::ApplyWorkspaceEditParams,
1765 server_id: usize,
1766 adapter: Arc<dyn LspAdapter>,
1767 language_server: Arc<LanguageServer>,
1768 mut cx: AsyncAppContext,
1769 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1770 let this = this
1771 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1773 let transaction = Self::deserialize_workspace_edit(
1774 this.clone(),
1775 params.edit,
1776 true,
1777 adapter.clone(),
1778 language_server.clone(),
1779 &mut cx,
1780 )
1781 .await
1782 .log_err();
1783 this.update(&mut cx, |this, _| {
1784 if let Some(transaction) = transaction {
1785 this.last_workspace_edits_by_language_server
1786 .insert(server_id, transaction);
1787 }
1788 });
1789 Ok(lsp::ApplyWorkspaceEditResponse {
1790 applied: true,
1791 failed_change: None,
1792 failure_reason: None,
1793 })
1794 }
1795
1796 fn broadcast_language_server_update(
1797 &self,
1798 language_server_id: usize,
1799 event: proto::update_language_server::Variant,
1800 ) {
1801 if let Some(project_id) = self.remote_id() {
1802 self.client
1803 .send(proto::UpdateLanguageServer {
1804 project_id,
1805 language_server_id: language_server_id as u64,
1806 variant: Some(event),
1807 })
1808 .log_err();
1809 }
1810 }
1811
1812 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1813 for (_, server) in self.language_servers.values() {
1814 server
1815 .notify::<lsp::notification::DidChangeConfiguration>(
1816 lsp::DidChangeConfigurationParams {
1817 settings: settings.clone(),
1818 },
1819 )
1820 .ok();
1821 }
1822 *self.language_server_settings.lock() = settings;
1823 }
1824
1825 pub fn language_server_statuses(
1826 &self,
1827 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1828 self.language_server_statuses.values()
1829 }
1830
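    /// Converts an LSP `publishDiagnostics` payload into diagnostic groups: each
    /// primary diagnostic gets a group id, and entries referenced through
    /// `relatedInformation` are attached to the same group as supporting
    /// diagnostics.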
1831 pub fn update_diagnostics(
1832 &mut self,
1833 params: lsp::PublishDiagnosticsParams,
1834 disk_based_sources: &[&str],
1835 cx: &mut ModelContext<Self>,
1836 ) -> Result<()> {
1837 let abs_path = params
1838 .uri
1839 .to_file_path()
1840 .map_err(|_| anyhow!("URI is not a file"))?;
1841 let mut next_group_id = 0;
1842 let mut diagnostics = Vec::default();
1843 let mut primary_diagnostic_group_ids = HashMap::default();
1844 let mut sources_by_group_id = HashMap::default();
1845 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1847 let source = diagnostic.source.as_ref();
1848 let code = diagnostic.code.as_ref().map(|code| match code {
1849 lsp::NumberOrString::Number(code) => code.to_string(),
1850 lsp::NumberOrString::String(code) => code.clone(),
1851 });
1852 let range = range_from_lsp(diagnostic.range);
1853 let is_supporting = diagnostic
1854 .related_information
1855 .as_ref()
1856 .map_or(false, |infos| {
1857 infos.iter().any(|info| {
1858 primary_diagnostic_group_ids.contains_key(&(
1859 source,
1860 code.clone(),
1861 range_from_lsp(info.location.range),
1862 ))
1863 })
1864 });
1865
1866 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1867 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1868 });
1869
1870 if is_supporting {
1871 supporting_diagnostics.insert(
1872 (source, code.clone(), range),
1873 (diagnostic.severity, is_unnecessary),
1874 );
1875 } else {
1876 let group_id = post_inc(&mut next_group_id);
1877 let is_disk_based = source.map_or(false, |source| {
1878 disk_based_sources.contains(&source.as_str())
1879 });
1880
1881 sources_by_group_id.insert(group_id, source);
1882 primary_diagnostic_group_ids
1883 .insert((source, code.clone(), range.clone()), group_id);
1884
1885 diagnostics.push(DiagnosticEntry {
1886 range,
1887 diagnostic: Diagnostic {
1888 code: code.clone(),
1889 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1890 message: diagnostic.message.clone(),
1891 group_id,
1892 is_primary: true,
1893 is_valid: true,
1894 is_disk_based,
1895 is_unnecessary,
1896 },
1897 });
1898 if let Some(infos) = &diagnostic.related_information {
1899 for info in infos {
1900 if info.location.uri == params.uri && !info.message.is_empty() {
1901 let range = range_from_lsp(info.location.range);
1902 diagnostics.push(DiagnosticEntry {
1903 range,
1904 diagnostic: Diagnostic {
1905 code: code.clone(),
1906 severity: DiagnosticSeverity::INFORMATION,
1907 message: info.message.clone(),
1908 group_id,
1909 is_primary: false,
1910 is_valid: true,
1911 is_disk_based,
1912 is_unnecessary: false,
1913 },
1914 });
1915 }
1916 }
1917 }
1918 }
1919 }
1920
1921 for entry in &mut diagnostics {
1922 let diagnostic = &mut entry.diagnostic;
1923 if !diagnostic.is_primary {
1924 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1925 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1926 source,
1927 diagnostic.code.clone(),
1928 entry.range.clone(),
1929 )) {
1930 if let Some(severity) = severity {
1931 diagnostic.severity = severity;
1932 }
1933 diagnostic.is_unnecessary = is_unnecessary;
1934 }
1935 }
1936 }
1937
1938 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1939 Ok(())
1940 }
1941
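    /// Stores diagnostics for a single file: updates any open buffer for the path and the
    /// owning worktree's diagnostic summary, emitting `Event::DiagnosticsUpdated` when the
    /// summary changes.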
1942 pub fn update_diagnostic_entries(
1943 &mut self,
1944 abs_path: PathBuf,
1945 version: Option<i32>,
1946 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1947 cx: &mut ModelContext<Project>,
1948 ) -> Result<(), anyhow::Error> {
1949 let (worktree, relative_path) = self
1950 .find_local_worktree(&abs_path, cx)
1951 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1952 if !worktree.read(cx).is_visible() {
1953 return Ok(());
1954 }
1955
1956 let project_path = ProjectPath {
1957 worktree_id: worktree.read(cx).id(),
1958 path: relative_path.into(),
1959 };
1960 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
1961 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1962 }
1963
1964 let updated = worktree.update(cx, |worktree, cx| {
1965 worktree
1966 .as_local_mut()
1967 .ok_or_else(|| anyhow!("not a local worktree"))?
1968 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1969 })?;
1970 if updated {
1971 cx.emit(Event::DiagnosticsUpdated(project_path));
1972 }
1973 Ok(())
1974 }
1975
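    /// Applies diagnostics to an open buffer, remapping the ranges of disk-based diagnostics
    /// across unsaved edits and clipping all ranges to valid positions in the buffer
    /// snapshot that corresponds to the reported LSP version.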
1976 fn update_buffer_diagnostics(
1977 &mut self,
1978 buffer: &ModelHandle<Buffer>,
1979 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1980 version: Option<i32>,
1981 cx: &mut ModelContext<Self>,
1982 ) -> Result<()> {
1983 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1984 Ordering::Equal
1985 .then_with(|| b.is_primary.cmp(&a.is_primary))
1986 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1987 .then_with(|| a.severity.cmp(&b.severity))
1988 .then_with(|| a.message.cmp(&b.message))
1989 }
1990
1991 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1992
1993 diagnostics.sort_unstable_by(|a, b| {
1994 Ordering::Equal
1995 .then_with(|| a.range.start.cmp(&b.range.start))
1996 .then_with(|| b.range.end.cmp(&a.range.end))
1997 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1998 });
1999
2000 let mut sanitized_diagnostics = Vec::new();
2001 let edits_since_save = Patch::new(
2002 snapshot
2003 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2004 .collect(),
2005 );
2006 for entry in diagnostics {
2007 let start;
2008 let end;
2009 if entry.diagnostic.is_disk_based {
2010 // Some diagnostics are based on files on disk instead of buffers'
2011 // current contents. Adjust these diagnostics' ranges to reflect
2012 // any unsaved edits.
2013 start = edits_since_save.old_to_new(entry.range.start);
2014 end = edits_since_save.old_to_new(entry.range.end);
2015 } else {
2016 start = entry.range.start;
2017 end = entry.range.end;
2018 }
2019
2020 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2021 ..snapshot.clip_point_utf16(end, Bias::Right);
2022
2023 // Expand empty ranges by one character
2024 if range.start == range.end {
2025 range.end.column += 1;
2026 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2027 if range.start == range.end && range.end.column > 0 {
2028 range.start.column -= 1;
2029 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2030 }
2031 }
2032
2033 sanitized_diagnostics.push(DiagnosticEntry {
2034 range,
2035 diagnostic: entry.diagnostic,
2036 });
2037 }
2038 drop(edits_since_save);
2039
2040 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2041 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2042 Ok(())
2043 }
2044
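    /// Reloads any dirty buffers in the given set from disk. Remote buffers are reloaded via
    /// the host; the returned project transaction covers every buffer that actually changed.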
2045 pub fn reload_buffers(
2046 &self,
2047 buffers: HashSet<ModelHandle<Buffer>>,
2048 push_to_history: bool,
2049 cx: &mut ModelContext<Self>,
2050 ) -> Task<Result<ProjectTransaction>> {
2051 let mut local_buffers = Vec::new();
2052 let mut remote_buffers = None;
2053 for buffer_handle in buffers {
2054 let buffer = buffer_handle.read(cx);
2055 if buffer.is_dirty() {
2056 if let Some(file) = File::from_dyn(buffer.file()) {
2057 if file.is_local() {
2058 local_buffers.push(buffer_handle);
2059 } else {
2060 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2061 }
2062 }
2063 }
2064 }
2065
2066 let remote_buffers = self.remote_id().zip(remote_buffers);
2067 let client = self.client.clone();
2068
2069 cx.spawn(|this, mut cx| async move {
2070 let mut project_transaction = ProjectTransaction::default();
2071
2072 if let Some((project_id, remote_buffers)) = remote_buffers {
2073 let response = client
2074 .request(proto::ReloadBuffers {
2075 project_id,
2076 buffer_ids: remote_buffers
2077 .iter()
2078 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2079 .collect(),
2080 })
2081 .await?
2082 .transaction
2083 .ok_or_else(|| anyhow!("missing transaction"))?;
2084 project_transaction = this
2085 .update(&mut cx, |this, cx| {
2086 this.deserialize_project_transaction(response, push_to_history, cx)
2087 })
2088 .await?;
2089 }
2090
2091 for buffer in local_buffers {
2092 let transaction = buffer
2093 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2094 .await?;
2095 buffer.update(&mut cx, |buffer, cx| {
2096 if let Some(transaction) = transaction {
2097 if !push_to_history {
2098 buffer.forget_transaction(transaction.id);
2099 }
2100 project_transaction.0.insert(cx.handle(), transaction);
2101 }
2102 });
2103 }
2104
2105 Ok(project_transaction)
2106 })
2107 }
2108
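    /// Formats the given buffers. Local buffers use their language server's document
    /// formatting, falling back to range formatting over the whole buffer; remote buffers
    /// are formatted by the host.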
2109 pub fn format(
2110 &self,
2111 buffers: HashSet<ModelHandle<Buffer>>,
2112 push_to_history: bool,
2113 cx: &mut ModelContext<Project>,
2114 ) -> Task<Result<ProjectTransaction>> {
2115 let mut local_buffers = Vec::new();
2116 let mut remote_buffers = None;
2117 for buffer_handle in buffers {
2118 let buffer = buffer_handle.read(cx);
2119 if let Some(file) = File::from_dyn(buffer.file()) {
2120 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2121 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2122 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2123 }
2124 } else {
2125 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2126 }
2127 } else {
2128 return Task::ready(Ok(Default::default()));
2129 }
2130 }
2131
2132 let remote_buffers = self.remote_id().zip(remote_buffers);
2133 let client = self.client.clone();
2134
2135 cx.spawn(|this, mut cx| async move {
2136 let mut project_transaction = ProjectTransaction::default();
2137
2138 if let Some((project_id, remote_buffers)) = remote_buffers {
2139 let response = client
2140 .request(proto::FormatBuffers {
2141 project_id,
2142 buffer_ids: remote_buffers
2143 .iter()
2144 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2145 .collect(),
2146 })
2147 .await?
2148 .transaction
2149 .ok_or_else(|| anyhow!("missing transaction"))?;
2150 project_transaction = this
2151 .update(&mut cx, |this, cx| {
2152 this.deserialize_project_transaction(response, push_to_history, cx)
2153 })
2154 .await?;
2155 }
2156
2157 for (buffer, buffer_abs_path, language_server) in local_buffers {
2158 let text_document = lsp::TextDocumentIdentifier::new(
2159 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2160 );
2161 let capabilities = &language_server.capabilities();
2162 let tab_size = cx.update(|cx| {
2163 let language_name = buffer.read(cx).language().map(|language| language.name());
2164 cx.global::<Settings>().tab_size(language_name.as_deref())
2165 });
2166 let lsp_edits = if capabilities
2167 .document_formatting_provider
2168 .as_ref()
2169 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2170 {
2171 language_server
2172 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2173 text_document,
2174 options: lsp::FormattingOptions {
2175 tab_size,
2176 insert_spaces: true,
2177 insert_final_newline: Some(true),
2178 ..Default::default()
2179 },
2180 work_done_progress_params: Default::default(),
2181 })
2182 .await?
2183 } else if capabilities
2184 .document_range_formatting_provider
2185 .as_ref()
2186 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2187 {
2188 let buffer_start = lsp::Position::new(0, 0);
2189 let buffer_end =
2190 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2191 language_server
2192 .request::<lsp::request::RangeFormatting>(
2193 lsp::DocumentRangeFormattingParams {
2194 text_document,
2195 range: lsp::Range::new(buffer_start, buffer_end),
2196 options: lsp::FormattingOptions {
2197                                     tab_size,
2198 insert_spaces: true,
2199 insert_final_newline: Some(true),
2200 ..Default::default()
2201 },
2202 work_done_progress_params: Default::default(),
2203 },
2204 )
2205 .await?
2206 } else {
2207 continue;
2208 };
2209
2210 if let Some(lsp_edits) = lsp_edits {
2211 let edits = this
2212 .update(&mut cx, |this, cx| {
2213 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2214 })
2215 .await?;
2216 buffer.update(&mut cx, |buffer, cx| {
2217 buffer.finalize_last_transaction();
2218 buffer.start_transaction();
2219 for (range, text) in edits {
2220 buffer.edit([range], text, cx);
2221 }
2222 if buffer.end_transaction(cx).is_some() {
2223 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2224 if !push_to_history {
2225 buffer.forget_transaction(transaction.id);
2226 }
2227 project_transaction.0.insert(cx.handle(), transaction);
2228 }
2229 });
2230 }
2231 }
2232
2233 Ok(project_transaction)
2234 })
2235 }
2236
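    /// Returns the definition locations for the symbol at `position` in `buffer`.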
2237 pub fn definition<T: ToPointUtf16>(
2238 &self,
2239 buffer: &ModelHandle<Buffer>,
2240 position: T,
2241 cx: &mut ModelContext<Self>,
2242 ) -> Task<Result<Vec<Location>>> {
2243 let position = position.to_point_utf16(buffer.read(cx));
2244 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2245 }
2246
2247 pub fn references<T: ToPointUtf16>(
2248 &self,
2249 buffer: &ModelHandle<Buffer>,
2250 position: T,
2251 cx: &mut ModelContext<Self>,
2252 ) -> Task<Result<Vec<Location>>> {
2253 let position = position.to_point_utf16(buffer.read(cx));
2254 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2255 }
2256
2257 pub fn document_highlights<T: ToPointUtf16>(
2258 &self,
2259 buffer: &ModelHandle<Buffer>,
2260 position: T,
2261 cx: &mut ModelContext<Self>,
2262 ) -> Task<Result<Vec<DocumentHighlight>>> {
2263 let position = position.to_point_utf16(buffer.read(cx));
2264
2265 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2266 }
2267
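    /// Searches workspace symbols across all running language servers (or via the host when
    /// remote), resolving each result to a path within a worktree.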
2268 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2269 if self.is_local() {
2270 let mut requests = Vec::new();
2271 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2272 let worktree_id = *worktree_id;
2273 if let Some(worktree) = self
2274 .worktree_for_id(worktree_id, cx)
2275 .and_then(|worktree| worktree.read(cx).as_local())
2276 {
2277 let lsp_adapter = lsp_adapter.clone();
2278 let worktree_abs_path = worktree.abs_path().clone();
2279 requests.push(
2280 language_server
2281 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2282 query: query.to_string(),
2283 ..Default::default()
2284 })
2285 .log_err()
2286 .map(move |response| {
2287 (
2288 lsp_adapter,
2289 worktree_id,
2290 worktree_abs_path,
2291 response.unwrap_or_default(),
2292 )
2293 }),
2294 );
2295 }
2296 }
2297
2298 cx.spawn_weak(|this, cx| async move {
2299 let responses = futures::future::join_all(requests).await;
2300 let this = if let Some(this) = this.upgrade(&cx) {
2301 this
2302 } else {
2303 return Ok(Default::default());
2304 };
2305 this.read_with(&cx, |this, cx| {
2306 let mut symbols = Vec::new();
2307 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2308 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2309 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2310 let mut worktree_id = source_worktree_id;
2311 let path;
2312 if let Some((worktree, rel_path)) =
2313 this.find_local_worktree(&abs_path, cx)
2314 {
2315 worktree_id = worktree.read(cx).id();
2316 path = rel_path;
2317 } else {
2318 path = relativize_path(&worktree_abs_path, &abs_path);
2319 }
2320
2321 let label = this
2322 .languages
2323 .select_language(&path)
2324 .and_then(|language| {
2325 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2326 })
2327 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2328 let signature = this.symbol_signature(worktree_id, &path);
2329
2330 Some(Symbol {
2331 source_worktree_id,
2332 worktree_id,
2333 language_server_name: adapter.name(),
2334 name: lsp_symbol.name,
2335 kind: lsp_symbol.kind,
2336 label,
2337 path,
2338 range: range_from_lsp(lsp_symbol.location.range),
2339 signature,
2340 })
2341 }));
2342 }
2343 Ok(symbols)
2344 })
2345 })
2346 } else if let Some(project_id) = self.remote_id() {
2347 let request = self.client.request(proto::GetProjectSymbols {
2348 project_id,
2349 query: query.to_string(),
2350 });
2351 cx.spawn_weak(|this, cx| async move {
2352 let response = request.await?;
2353 let mut symbols = Vec::new();
2354 if let Some(this) = this.upgrade(&cx) {
2355 this.read_with(&cx, |this, _| {
2356 symbols.extend(
2357 response
2358 .symbols
2359 .into_iter()
2360 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2361 );
2362 })
2363 }
2364 Ok(symbols)
2365 })
2366 } else {
2367 Task::ready(Ok(Default::default()))
2368 }
2369 }
2370
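    /// Opens the buffer containing the given symbol via the language server that reported it.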
2371 pub fn open_buffer_for_symbol(
2372 &mut self,
2373 symbol: &Symbol,
2374 cx: &mut ModelContext<Self>,
2375 ) -> Task<Result<ModelHandle<Buffer>>> {
2376 if self.is_local() {
2377 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2378 symbol.source_worktree_id,
2379 symbol.language_server_name.clone(),
2380 )) {
2381 server.clone()
2382 } else {
2383 return Task::ready(Err(anyhow!(
2384 "language server for worktree and language not found"
2385 )));
2386 };
2387
2388 let worktree_abs_path = if let Some(worktree_abs_path) = self
2389 .worktree_for_id(symbol.worktree_id, cx)
2390 .and_then(|worktree| worktree.read(cx).as_local())
2391 .map(|local_worktree| local_worktree.abs_path())
2392 {
2393 worktree_abs_path
2394 } else {
2395 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2396 };
2397 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2398 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2399 uri
2400 } else {
2401 return Task::ready(Err(anyhow!("invalid symbol path")));
2402 };
2403
2404 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2405 } else if let Some(project_id) = self.remote_id() {
2406 let request = self.client.request(proto::OpenBufferForSymbol {
2407 project_id,
2408 symbol: Some(serialize_symbol(symbol)),
2409 });
2410 cx.spawn(|this, mut cx| async move {
2411 let response = request.await?;
2412 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2413 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2414 .await
2415 })
2416 } else {
2417 Task::ready(Err(anyhow!("project does not have a remote id")))
2418 }
2419 }
2420
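    /// Requests completions at `position`. LSP text edits are converted into anchored edits
    /// on the source buffer; items whose edit range falls outside the buffer are skipped.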
2421 pub fn completions<T: ToPointUtf16>(
2422 &self,
2423 source_buffer_handle: &ModelHandle<Buffer>,
2424 position: T,
2425 cx: &mut ModelContext<Self>,
2426 ) -> Task<Result<Vec<Completion>>> {
2427 let source_buffer_handle = source_buffer_handle.clone();
2428 let source_buffer = source_buffer_handle.read(cx);
2429 let buffer_id = source_buffer.remote_id();
2430 let language = source_buffer.language().cloned();
2431 let worktree;
2432 let buffer_abs_path;
2433 if let Some(file) = File::from_dyn(source_buffer.file()) {
2434 worktree = file.worktree.clone();
2435 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2436 } else {
2437 return Task::ready(Ok(Default::default()));
2438 };
2439
2440 let position = position.to_point_utf16(source_buffer);
2441 let anchor = source_buffer.anchor_after(position);
2442
2443 if worktree.read(cx).as_local().is_some() {
2444 let buffer_abs_path = buffer_abs_path.unwrap();
2445 let (_, lang_server) =
2446 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2447 server.clone()
2448 } else {
2449 return Task::ready(Ok(Default::default()));
2450 };
2451
2452 cx.spawn(|_, cx| async move {
2453 let completions = lang_server
2454 .request::<lsp::request::Completion>(lsp::CompletionParams {
2455 text_document_position: lsp::TextDocumentPositionParams::new(
2456 lsp::TextDocumentIdentifier::new(
2457 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2458 ),
2459 point_to_lsp(position),
2460 ),
2461 context: Default::default(),
2462 work_done_progress_params: Default::default(),
2463 partial_result_params: Default::default(),
2464 })
2465 .await
2466 .context("lsp completion request failed")?;
2467
2468 let completions = if let Some(completions) = completions {
2469 match completions {
2470 lsp::CompletionResponse::Array(completions) => completions,
2471 lsp::CompletionResponse::List(list) => list.items,
2472 }
2473 } else {
2474 Default::default()
2475 };
2476
2477 source_buffer_handle.read_with(&cx, |this, _| {
2478 Ok(completions
2479 .into_iter()
2480 .filter_map(|lsp_completion| {
2481 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2482 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2483 (range_from_lsp(edit.range), edit.new_text.clone())
2484 }
2485 None => {
2486 let clipped_position =
2487 this.clip_point_utf16(position, Bias::Left);
2488 if position != clipped_position {
2489 log::info!("completion out of expected range");
2490 return None;
2491 }
2492 (
2493 this.common_prefix_at(
2494 clipped_position,
2495 &lsp_completion.label,
2496 ),
2497 lsp_completion.label.clone(),
2498 )
2499 }
2500 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2501 log::info!("unsupported insert/replace completion");
2502 return None;
2503 }
2504 };
2505
2506 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2507 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2508 if clipped_start == old_range.start && clipped_end == old_range.end {
2509 Some(Completion {
2510 old_range: this.anchor_before(old_range.start)
2511 ..this.anchor_after(old_range.end),
2512 new_text,
2513 label: language
2514 .as_ref()
2515 .and_then(|l| l.label_for_completion(&lsp_completion))
2516 .unwrap_or_else(|| {
2517 CodeLabel::plain(
2518 lsp_completion.label.clone(),
2519 lsp_completion.filter_text.as_deref(),
2520 )
2521 }),
2522 lsp_completion,
2523 })
2524 } else {
2525 log::info!("completion out of expected range");
2526 None
2527 }
2528 })
2529 .collect())
2530 })
2531 })
2532 } else if let Some(project_id) = self.remote_id() {
2533 let rpc = self.client.clone();
2534 let message = proto::GetCompletions {
2535 project_id,
2536 buffer_id,
2537 position: Some(language::proto::serialize_anchor(&anchor)),
2538 version: serialize_version(&source_buffer.version()),
2539 };
2540 cx.spawn_weak(|_, mut cx| async move {
2541 let response = rpc.request(message).await?;
2542
2543 source_buffer_handle
2544 .update(&mut cx, |buffer, _| {
2545 buffer.wait_for_version(deserialize_version(response.version))
2546 })
2547 .await;
2548
2549 response
2550 .completions
2551 .into_iter()
2552 .map(|completion| {
2553 language::proto::deserialize_completion(completion, language.as_ref())
2554 })
2555 .collect()
2556 })
2557 } else {
2558 Task::ready(Ok(Default::default()))
2559 }
2560 }
2561
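    /// Resolves the given completion and applies any additional text edits it carries
    /// (e.g. auto-imports), returning the transaction if any edits were made.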
2562 pub fn apply_additional_edits_for_completion(
2563 &self,
2564 buffer_handle: ModelHandle<Buffer>,
2565 completion: Completion,
2566 push_to_history: bool,
2567 cx: &mut ModelContext<Self>,
2568 ) -> Task<Result<Option<Transaction>>> {
2569 let buffer = buffer_handle.read(cx);
2570 let buffer_id = buffer.remote_id();
2571
2572 if self.is_local() {
2573 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2574 {
2575 server.clone()
2576 } else {
2577 return Task::ready(Ok(Default::default()));
2578 };
2579
2580 cx.spawn(|this, mut cx| async move {
2581 let resolved_completion = lang_server
2582 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2583 .await?;
2584 if let Some(edits) = resolved_completion.additional_text_edits {
2585 let edits = this
2586 .update(&mut cx, |this, cx| {
2587 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2588 })
2589 .await?;
2590 buffer_handle.update(&mut cx, |buffer, cx| {
2591 buffer.finalize_last_transaction();
2592 buffer.start_transaction();
2593 for (range, text) in edits {
2594 buffer.edit([range], text, cx);
2595 }
2596 let transaction = if buffer.end_transaction(cx).is_some() {
2597 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2598 if !push_to_history {
2599 buffer.forget_transaction(transaction.id);
2600 }
2601 Some(transaction)
2602 } else {
2603 None
2604 };
2605 Ok(transaction)
2606 })
2607 } else {
2608 Ok(None)
2609 }
2610 })
2611 } else if let Some(project_id) = self.remote_id() {
2612 let client = self.client.clone();
2613 cx.spawn(|_, mut cx| async move {
2614 let response = client
2615 .request(proto::ApplyCompletionAdditionalEdits {
2616 project_id,
2617 buffer_id,
2618 completion: Some(language::proto::serialize_completion(&completion)),
2619 })
2620 .await?;
2621
2622 if let Some(transaction) = response.transaction {
2623 let transaction = language::proto::deserialize_transaction(transaction)?;
2624 buffer_handle
2625 .update(&mut cx, |buffer, _| {
2626 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2627 })
2628 .await;
2629 if push_to_history {
2630 buffer_handle.update(&mut cx, |buffer, _| {
2631 buffer.push_transaction(transaction.clone(), Instant::now());
2632 });
2633 }
2634 Ok(Some(transaction))
2635 } else {
2636 Ok(None)
2637 }
2638 })
2639 } else {
2640 Task::ready(Err(anyhow!("project does not have a remote id")))
2641 }
2642 }
2643
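    /// Requests quickfix, refactor, and source code actions for the given range, passing
    /// along the diagnostics that overlap it as context.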
2644 pub fn code_actions<T: Clone + ToOffset>(
2645 &self,
2646 buffer_handle: &ModelHandle<Buffer>,
2647 range: Range<T>,
2648 cx: &mut ModelContext<Self>,
2649 ) -> Task<Result<Vec<CodeAction>>> {
2650 let buffer_handle = buffer_handle.clone();
2651 let buffer = buffer_handle.read(cx);
2652 let snapshot = buffer.snapshot();
2653 let relevant_diagnostics = snapshot
2654 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2655 .map(|entry| entry.to_lsp_diagnostic_stub())
2656 .collect();
2657 let buffer_id = buffer.remote_id();
2658 let worktree;
2659 let buffer_abs_path;
2660 if let Some(file) = File::from_dyn(buffer.file()) {
2661 worktree = file.worktree.clone();
2662 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2663 } else {
2664 return Task::ready(Ok(Default::default()));
2665 };
2666 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2667
2668 if worktree.read(cx).as_local().is_some() {
2669 let buffer_abs_path = buffer_abs_path.unwrap();
2670 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2671 {
2672 server.clone()
2673 } else {
2674 return Task::ready(Ok(Default::default()));
2675 };
2676
2677 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2678 cx.foreground().spawn(async move {
2679                 if lang_server.capabilities().code_action_provider.is_none() {
2680 return Ok(Default::default());
2681 }
2682
2683 Ok(lang_server
2684 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2685 text_document: lsp::TextDocumentIdentifier::new(
2686 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2687 ),
2688 range: lsp_range,
2689 work_done_progress_params: Default::default(),
2690 partial_result_params: Default::default(),
2691 context: lsp::CodeActionContext {
2692 diagnostics: relevant_diagnostics,
2693 only: Some(vec![
2694 lsp::CodeActionKind::QUICKFIX,
2695 lsp::CodeActionKind::REFACTOR,
2696 lsp::CodeActionKind::REFACTOR_EXTRACT,
2697 lsp::CodeActionKind::SOURCE,
2698 ]),
2699 },
2700 })
2701 .await?
2702 .unwrap_or_default()
2703 .into_iter()
2704 .filter_map(|entry| {
2705 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2706 Some(CodeAction {
2707 range: range.clone(),
2708 lsp_action,
2709 })
2710 } else {
2711 None
2712 }
2713 })
2714 .collect())
2715 })
2716 } else if let Some(project_id) = self.remote_id() {
2717 let rpc = self.client.clone();
2718 let version = buffer.version();
2719 cx.spawn_weak(|_, mut cx| async move {
2720 let response = rpc
2721 .request(proto::GetCodeActions {
2722 project_id,
2723 buffer_id,
2724 start: Some(language::proto::serialize_anchor(&range.start)),
2725 end: Some(language::proto::serialize_anchor(&range.end)),
2726 version: serialize_version(&version),
2727 })
2728 .await?;
2729
2730 buffer_handle
2731 .update(&mut cx, |buffer, _| {
2732 buffer.wait_for_version(deserialize_version(response.version))
2733 })
2734 .await;
2735
2736 response
2737 .actions
2738 .into_iter()
2739 .map(language::proto::deserialize_code_action)
2740 .collect()
2741 })
2742 } else {
2743 Task::ready(Ok(Default::default()))
2744 }
2745 }
2746
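    /// Applies a code action: resolves it (or re-requests it) if necessary, then applies its
    /// workspace edit or executes its command, returning the project transaction produced by
    /// the resulting edits.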
2747 pub fn apply_code_action(
2748 &self,
2749 buffer_handle: ModelHandle<Buffer>,
2750 mut action: CodeAction,
2751 push_to_history: bool,
2752 cx: &mut ModelContext<Self>,
2753 ) -> Task<Result<ProjectTransaction>> {
2754 if self.is_local() {
2755 let buffer = buffer_handle.read(cx);
2756 let (lsp_adapter, lang_server) =
2757 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2758 server.clone()
2759 } else {
2760 return Task::ready(Ok(Default::default()));
2761 };
2762 let range = action.range.to_point_utf16(buffer);
2763
2764 cx.spawn(|this, mut cx| async move {
2765 if let Some(lsp_range) = action
2766 .lsp_action
2767 .data
2768 .as_mut()
2769 .and_then(|d| d.get_mut("codeActionParams"))
2770 .and_then(|d| d.get_mut("range"))
2771 {
2772 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2773 action.lsp_action = lang_server
2774 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2775 .await?;
2776 } else {
2777 let actions = this
2778 .update(&mut cx, |this, cx| {
2779 this.code_actions(&buffer_handle, action.range, cx)
2780 })
2781 .await?;
2782 action.lsp_action = actions
2783 .into_iter()
2784 .find(|a| a.lsp_action.title == action.lsp_action.title)
2785 .ok_or_else(|| anyhow!("code action is outdated"))?
2786 .lsp_action;
2787 }
2788
2789 if let Some(edit) = action.lsp_action.edit {
2790 Self::deserialize_workspace_edit(
2791 this,
2792 edit,
2793 push_to_history,
2794 lsp_adapter,
2795 lang_server,
2796 &mut cx,
2797 )
2798 .await
2799 } else if let Some(command) = action.lsp_action.command {
2800 this.update(&mut cx, |this, _| {
2801 this.last_workspace_edits_by_language_server
2802 .remove(&lang_server.server_id());
2803 });
2804 lang_server
2805 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2806 command: command.command,
2807 arguments: command.arguments.unwrap_or_default(),
2808 ..Default::default()
2809 })
2810 .await?;
2811 Ok(this.update(&mut cx, |this, _| {
2812 this.last_workspace_edits_by_language_server
2813 .remove(&lang_server.server_id())
2814 .unwrap_or_default()
2815 }))
2816 } else {
2817 Ok(ProjectTransaction::default())
2818 }
2819 })
2820 } else if let Some(project_id) = self.remote_id() {
2821 let client = self.client.clone();
2822 let request = proto::ApplyCodeAction {
2823 project_id,
2824 buffer_id: buffer_handle.read(cx).remote_id(),
2825 action: Some(language::proto::serialize_code_action(&action)),
2826 };
2827 cx.spawn(|this, mut cx| async move {
2828 let response = client
2829 .request(request)
2830 .await?
2831 .transaction
2832 .ok_or_else(|| anyhow!("missing transaction"))?;
2833 this.update(&mut cx, |this, cx| {
2834 this.deserialize_project_transaction(response, push_to_history, cx)
2835 })
2836 .await
2837 })
2838 } else {
2839 Task::ready(Err(anyhow!("project does not have a remote id")))
2840 }
2841 }
2842
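    /// Applies an LSP workspace edit: file creations, renames, and deletions are performed
    /// through the project's `Fs`, and text edits are applied to the affected buffers, which
    /// are opened on demand.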
2843 async fn deserialize_workspace_edit(
2844 this: ModelHandle<Self>,
2845 edit: lsp::WorkspaceEdit,
2846 push_to_history: bool,
2847 lsp_adapter: Arc<dyn LspAdapter>,
2848 language_server: Arc<LanguageServer>,
2849 cx: &mut AsyncAppContext,
2850 ) -> Result<ProjectTransaction> {
2851 let fs = this.read_with(cx, |this, _| this.fs.clone());
2852 let mut operations = Vec::new();
2853 if let Some(document_changes) = edit.document_changes {
2854 match document_changes {
2855 lsp::DocumentChanges::Edits(edits) => {
2856 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2857 }
2858 lsp::DocumentChanges::Operations(ops) => operations = ops,
2859 }
2860 } else if let Some(changes) = edit.changes {
2861 operations.extend(changes.into_iter().map(|(uri, edits)| {
2862 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2863 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2864 uri,
2865 version: None,
2866 },
2867 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2868 })
2869 }));
2870 }
2871
2872 let mut project_transaction = ProjectTransaction::default();
2873 for operation in operations {
2874 match operation {
2875 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2876 let abs_path = op
2877 .uri
2878 .to_file_path()
2879 .map_err(|_| anyhow!("can't convert URI to path"))?;
2880
2881 if let Some(parent_path) = abs_path.parent() {
2882 fs.create_dir(parent_path).await?;
2883 }
2884 if abs_path.ends_with("/") {
2885 fs.create_dir(&abs_path).await?;
2886 } else {
2887 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2888 .await?;
2889 }
2890 }
2891 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2892 let source_abs_path = op
2893 .old_uri
2894 .to_file_path()
2895 .map_err(|_| anyhow!("can't convert URI to path"))?;
2896 let target_abs_path = op
2897 .new_uri
2898 .to_file_path()
2899 .map_err(|_| anyhow!("can't convert URI to path"))?;
2900 fs.rename(
2901 &source_abs_path,
2902 &target_abs_path,
2903 op.options.map(Into::into).unwrap_or_default(),
2904 )
2905 .await?;
2906 }
2907 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2908 let abs_path = op
2909 .uri
2910 .to_file_path()
2911 .map_err(|_| anyhow!("can't convert URI to path"))?;
2912 let options = op.options.map(Into::into).unwrap_or_default();
2913 if abs_path.ends_with("/") {
2914 fs.remove_dir(&abs_path, options).await?;
2915 } else {
2916 fs.remove_file(&abs_path, options).await?;
2917 }
2918 }
2919 lsp::DocumentChangeOperation::Edit(op) => {
2920 let buffer_to_edit = this
2921 .update(cx, |this, cx| {
2922 this.open_local_buffer_via_lsp(
2923 op.text_document.uri,
2924 lsp_adapter.clone(),
2925 language_server.clone(),
2926 cx,
2927 )
2928 })
2929 .await?;
2930
2931 let edits = this
2932 .update(cx, |this, cx| {
2933 let edits = op.edits.into_iter().map(|edit| match edit {
2934 lsp::OneOf::Left(edit) => edit,
2935 lsp::OneOf::Right(edit) => edit.text_edit,
2936 });
2937 this.edits_from_lsp(
2938 &buffer_to_edit,
2939 edits,
2940 op.text_document.version,
2941 cx,
2942 )
2943 })
2944 .await?;
2945
2946 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2947 buffer.finalize_last_transaction();
2948 buffer.start_transaction();
2949 for (range, text) in edits {
2950 buffer.edit([range], text, cx);
2951 }
2952 let transaction = if buffer.end_transaction(cx).is_some() {
2953 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2954 if !push_to_history {
2955 buffer.forget_transaction(transaction.id);
2956 }
2957 Some(transaction)
2958 } else {
2959 None
2960 };
2961
2962 transaction
2963 });
2964 if let Some(transaction) = transaction {
2965 project_transaction.0.insert(buffer_to_edit, transaction);
2966 }
2967 }
2968 }
2969 }
2970
2971 Ok(project_transaction)
2972 }
2973
2974 pub fn prepare_rename<T: ToPointUtf16>(
2975 &self,
2976 buffer: ModelHandle<Buffer>,
2977 position: T,
2978 cx: &mut ModelContext<Self>,
2979 ) -> Task<Result<Option<Range<Anchor>>>> {
2980 let position = position.to_point_utf16(buffer.read(cx));
2981 self.request_lsp(buffer, PrepareRename { position }, cx)
2982 }
2983
2984 pub fn perform_rename<T: ToPointUtf16>(
2985 &self,
2986 buffer: ModelHandle<Buffer>,
2987 position: T,
2988 new_name: String,
2989 push_to_history: bool,
2990 cx: &mut ModelContext<Self>,
2991 ) -> Task<Result<ProjectTransaction>> {
2992 let position = position.to_point_utf16(buffer.read(cx));
2993 self.request_lsp(
2994 buffer,
2995 PerformRename {
2996 position,
2997 new_name,
2998 push_to_history,
2999 },
3000 cx,
3001 )
3002 }
3003
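    /// Searches the project for `query`. Visible worktree files are scanned on background
    /// threads to find candidate paths, matching buffers are opened, and match ranges are
    /// returned as anchors keyed by buffer.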
3004 pub fn search(
3005 &self,
3006 query: SearchQuery,
3007 cx: &mut ModelContext<Self>,
3008 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3009 if self.is_local() {
3010 let snapshots = self
3011 .visible_worktrees(cx)
3012 .filter_map(|tree| {
3013 let tree = tree.read(cx).as_local()?;
3014 Some(tree.snapshot())
3015 })
3016 .collect::<Vec<_>>();
3017
3018 let background = cx.background().clone();
3019 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3020 if path_count == 0 {
3021 return Task::ready(Ok(Default::default()));
3022 }
3023 let workers = background.num_cpus().min(path_count);
3024 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3025 cx.background()
3026 .spawn({
3027 let fs = self.fs.clone();
3028 let background = cx.background().clone();
3029 let query = query.clone();
3030 async move {
3031 let fs = &fs;
3032 let query = &query;
3033 let matching_paths_tx = &matching_paths_tx;
3034 let paths_per_worker = (path_count + workers - 1) / workers;
3035 let snapshots = &snapshots;
3036 background
3037 .scoped(|scope| {
3038 for worker_ix in 0..workers {
3039 let worker_start_ix = worker_ix * paths_per_worker;
3040 let worker_end_ix = worker_start_ix + paths_per_worker;
3041 scope.spawn(async move {
3042 let mut snapshot_start_ix = 0;
3043 let mut abs_path = PathBuf::new();
3044 for snapshot in snapshots {
3045 let snapshot_end_ix =
3046 snapshot_start_ix + snapshot.visible_file_count();
3047 if worker_end_ix <= snapshot_start_ix {
3048 break;
3049 } else if worker_start_ix > snapshot_end_ix {
3050 snapshot_start_ix = snapshot_end_ix;
3051 continue;
3052 } else {
3053 let start_in_snapshot = worker_start_ix
3054 .saturating_sub(snapshot_start_ix);
3055 let end_in_snapshot =
3056 cmp::min(worker_end_ix, snapshot_end_ix)
3057 - snapshot_start_ix;
3058
3059 for entry in snapshot
3060 .files(false, start_in_snapshot)
3061 .take(end_in_snapshot - start_in_snapshot)
3062 {
3063 if matching_paths_tx.is_closed() {
3064 break;
3065 }
3066
3067 abs_path.clear();
3068 abs_path.push(&snapshot.abs_path());
3069 abs_path.push(&entry.path);
3070 let matches = if let Some(file) =
3071 fs.open_sync(&abs_path).await.log_err()
3072 {
3073 query.detect(file).unwrap_or(false)
3074 } else {
3075 false
3076 };
3077
3078 if matches {
3079 let project_path =
3080 (snapshot.id(), entry.path.clone());
3081 if matching_paths_tx
3082 .send(project_path)
3083 .await
3084 .is_err()
3085 {
3086 break;
3087 }
3088 }
3089 }
3090
3091 snapshot_start_ix = snapshot_end_ix;
3092 }
3093 }
3094 });
3095 }
3096 })
3097 .await;
3098 }
3099 })
3100 .detach();
3101
3102 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3103 let open_buffers = self
3104 .opened_buffers
3105 .values()
3106 .filter_map(|b| b.upgrade(cx))
3107 .collect::<HashSet<_>>();
3108 cx.spawn(|this, cx| async move {
3109 for buffer in &open_buffers {
3110 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3111 buffers_tx.send((buffer.clone(), snapshot)).await?;
3112 }
3113
3114 let open_buffers = Rc::new(RefCell::new(open_buffers));
3115 while let Some(project_path) = matching_paths_rx.next().await {
3116 if buffers_tx.is_closed() {
3117 break;
3118 }
3119
3120 let this = this.clone();
3121 let open_buffers = open_buffers.clone();
3122 let buffers_tx = buffers_tx.clone();
3123 cx.spawn(|mut cx| async move {
3124 if let Some(buffer) = this
3125 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3126 .await
3127 .log_err()
3128 {
3129 if open_buffers.borrow_mut().insert(buffer.clone()) {
3130 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3131 buffers_tx.send((buffer, snapshot)).await?;
3132 }
3133 }
3134
3135 Ok::<_, anyhow::Error>(())
3136 })
3137 .detach();
3138 }
3139
3140 Ok::<_, anyhow::Error>(())
3141 })
3142 .detach_and_log_err(cx);
3143
3144 let background = cx.background().clone();
3145 cx.background().spawn(async move {
3146 let query = &query;
3147 let mut matched_buffers = Vec::new();
3148 for _ in 0..workers {
3149 matched_buffers.push(HashMap::default());
3150 }
3151 background
3152 .scoped(|scope| {
3153 for worker_matched_buffers in matched_buffers.iter_mut() {
3154 let mut buffers_rx = buffers_rx.clone();
3155 scope.spawn(async move {
3156 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3157 let buffer_matches = query
3158 .search(snapshot.as_rope())
3159 .await
3160 .iter()
3161 .map(|range| {
3162 snapshot.anchor_before(range.start)
3163 ..snapshot.anchor_after(range.end)
3164 })
3165 .collect::<Vec<_>>();
3166 if !buffer_matches.is_empty() {
3167 worker_matched_buffers
3168 .insert(buffer.clone(), buffer_matches);
3169 }
3170 }
3171 });
3172 }
3173 })
3174 .await;
3175 Ok(matched_buffers.into_iter().flatten().collect())
3176 })
3177 } else if let Some(project_id) = self.remote_id() {
3178 let request = self.client.request(query.to_proto(project_id));
3179 cx.spawn(|this, mut cx| async move {
3180 let response = request.await?;
3181 let mut result = HashMap::default();
3182 for location in response.locations {
3183 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3184 let target_buffer = this
3185 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3186 .await?;
3187 let start = location
3188 .start
3189 .and_then(deserialize_anchor)
3190 .ok_or_else(|| anyhow!("missing target start"))?;
3191 let end = location
3192 .end
3193 .and_then(deserialize_anchor)
3194 .ok_or_else(|| anyhow!("missing target end"))?;
3195 result
3196 .entry(target_buffer)
3197 .or_insert(Vec::new())
3198 .push(start..end)
3199 }
3200 Ok(result)
3201 })
3202 } else {
3203 Task::ready(Ok(Default::default()))
3204 }
3205 }
3206
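    /// Sends a typed LSP request for the given buffer, either directly to the buffer's local
    /// language server or over RPC to the project host.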
3207 fn request_lsp<R: LspCommand>(
3208 &self,
3209 buffer_handle: ModelHandle<Buffer>,
3210 request: R,
3211 cx: &mut ModelContext<Self>,
3212 ) -> Task<Result<R::Response>>
3213 where
3214 <R::LspRequest as lsp::request::Request>::Result: Send,
3215 {
3216 let buffer = buffer_handle.read(cx);
3217 if self.is_local() {
3218 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3219 if let Some((file, (_, language_server))) =
3220 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3221 {
3222 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3223 return cx.spawn(|this, cx| async move {
3224 if !request.check_capabilities(&language_server.capabilities()) {
3225 return Ok(Default::default());
3226 }
3227
3228 let response = language_server
3229 .request::<R::LspRequest>(lsp_params)
3230 .await
3231 .context("lsp request failed")?;
3232 request
3233 .response_from_lsp(response, this, buffer_handle, cx)
3234 .await
3235 });
3236 }
3237 } else if let Some(project_id) = self.remote_id() {
3238 let rpc = self.client.clone();
3239 let message = request.to_proto(project_id, buffer);
3240 return cx.spawn(|this, cx| async move {
3241 let response = rpc.request(message).await?;
3242 request
3243 .response_from_proto(response, this, buffer_handle, cx)
3244 .await
3245 });
3246 }
3247 Task::ready(Ok(Default::default()))
3248 }
3249
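    /// Returns the local worktree containing `abs_path` along with the path relative to its
    /// root, creating a new worktree when none contains it.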
3250 pub fn find_or_create_local_worktree(
3251 &mut self,
3252 abs_path: impl AsRef<Path>,
3253 visible: bool,
3254 cx: &mut ModelContext<Self>,
3255 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3256 let abs_path = abs_path.as_ref();
3257 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3258 Task::ready(Ok((tree.clone(), relative_path.into())))
3259 } else {
3260 let worktree = self.create_local_worktree(abs_path, visible, cx);
3261 cx.foreground()
3262 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3263 }
3264 }
3265
3266 pub fn find_local_worktree(
3267 &self,
3268 abs_path: &Path,
3269 cx: &AppContext,
3270 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3271 for tree in self.worktrees(cx) {
3272 if let Some(relative_path) = tree
3273 .read(cx)
3274 .as_local()
3275 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3276 {
3277 return Some((tree.clone(), relative_path.into()));
3278 }
3279 }
3280 None
3281 }
3282
3283 pub fn is_shared(&self) -> bool {
3284 match &self.client_state {
3285 ProjectClientState::Local { is_shared, .. } => *is_shared,
3286 ProjectClientState::Remote { .. } => false,
3287 }
3288 }
3289
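    /// Creates a local worktree rooted at `abs_path`, deduplicating concurrent loads of the
    /// same path and registering or sharing the worktree when the project has a remote id.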
3290 fn create_local_worktree(
3291 &mut self,
3292 abs_path: impl AsRef<Path>,
3293 visible: bool,
3294 cx: &mut ModelContext<Self>,
3295 ) -> Task<Result<ModelHandle<Worktree>>> {
3296 let fs = self.fs.clone();
3297 let client = self.client.clone();
3298 let next_entry_id = self.next_entry_id.clone();
3299 let path: Arc<Path> = abs_path.as_ref().into();
3300 let task = self
3301 .loading_local_worktrees
3302 .entry(path.clone())
3303 .or_insert_with(|| {
3304 cx.spawn(|project, mut cx| {
3305 async move {
3306 let worktree = Worktree::local(
3307 client.clone(),
3308 path.clone(),
3309 visible,
3310 fs,
3311 next_entry_id,
3312 &mut cx,
3313 )
3314 .await;
3315 project.update(&mut cx, |project, _| {
3316 project.loading_local_worktrees.remove(&path);
3317 });
3318 let worktree = worktree?;
3319
3320 let (remote_project_id, is_shared) =
3321 project.update(&mut cx, |project, cx| {
3322 project.add_worktree(&worktree, cx);
3323 (project.remote_id(), project.is_shared())
3324 });
3325
3326 if let Some(project_id) = remote_project_id {
3327 if is_shared {
3328 worktree
3329 .update(&mut cx, |worktree, cx| {
3330 worktree.as_local_mut().unwrap().share(project_id, cx)
3331 })
3332 .await?;
3333 } else {
3334 worktree
3335 .update(&mut cx, |worktree, cx| {
3336 worktree.as_local_mut().unwrap().register(project_id, cx)
3337 })
3338 .await?;
3339 }
3340 }
3341
3342 Ok(worktree)
3343 }
3344                 .map_err(Arc::new)
3345 })
3346 .shared()
3347 })
3348 .clone();
3349 cx.foreground().spawn(async move {
3350 match task.await {
3351 Ok(worktree) => Ok(worktree),
3352 Err(err) => Err(anyhow!("{}", err)),
3353 }
3354 })
3355 }
3356
3357 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3358 self.worktrees.retain(|worktree| {
3359 worktree
3360 .upgrade(cx)
3361 .map_or(false, |w| w.read(cx).id() != id)
3362 });
3363 cx.notify();
3364 }
3365
3366 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3367 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3368 if worktree.read(cx).is_local() {
3369 cx.subscribe(&worktree, |this, worktree, _, cx| {
3370 this.update_local_worktree_buffers(worktree, cx);
3371 })
3372 .detach();
3373 }
3374
3375 let push_strong_handle = {
3376 let worktree = worktree.read(cx);
3377 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3378 };
3379 if push_strong_handle {
3380 self.worktrees
3381 .push(WorktreeHandle::Strong(worktree.clone()));
3382 } else {
3383 cx.observe_release(&worktree, |this, _, cx| {
3384 this.worktrees
3385 .retain(|worktree| worktree.upgrade(cx).is_some());
3386 cx.notify();
3387 })
3388 .detach();
3389 self.worktrees
3390 .push(WorktreeHandle::Weak(worktree.downgrade()));
3391 }
3392 cx.notify();
3393 }
3394
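    /// Reconciles open buffers with the latest snapshot of a local worktree, refreshing each
    /// buffer's file metadata, dropping buffers whose handles are gone, and re-registering
    /// renamed buffers with their language servers.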
3395 fn update_local_worktree_buffers(
3396 &mut self,
3397 worktree_handle: ModelHandle<Worktree>,
3398 cx: &mut ModelContext<Self>,
3399 ) {
3400 let snapshot = worktree_handle.read(cx).snapshot();
3401 let mut buffers_to_delete = Vec::new();
3402 let mut renamed_buffers = Vec::new();
3403 for (buffer_id, buffer) in &self.opened_buffers {
3404 if let Some(buffer) = buffer.upgrade(cx) {
3405 buffer.update(cx, |buffer, cx| {
3406 if let Some(old_file) = File::from_dyn(buffer.file()) {
3407 if old_file.worktree != worktree_handle {
3408 return;
3409 }
3410
3411 let new_file = if let Some(entry) = old_file
3412 .entry_id
3413 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3414 {
3415 File {
3416 is_local: true,
3417 entry_id: Some(entry.id),
3418 mtime: entry.mtime,
3419 path: entry.path.clone(),
3420 worktree: worktree_handle.clone(),
3421 }
3422 } else if let Some(entry) =
3423 snapshot.entry_for_path(old_file.path().as_ref())
3424 {
3425 File {
3426 is_local: true,
3427 entry_id: Some(entry.id),
3428 mtime: entry.mtime,
3429 path: entry.path.clone(),
3430 worktree: worktree_handle.clone(),
3431 }
3432 } else {
3433 File {
3434 is_local: true,
3435 entry_id: None,
3436 path: old_file.path().clone(),
3437 mtime: old_file.mtime(),
3438 worktree: worktree_handle.clone(),
3439 }
3440 };
3441
3442 let old_path = old_file.abs_path(cx);
3443 if new_file.abs_path(cx) != old_path {
3444 renamed_buffers.push((cx.handle(), old_path));
3445 }
3446
3447 if let Some(project_id) = self.remote_id() {
3448 self.client
3449 .send(proto::UpdateBufferFile {
3450 project_id,
3451 buffer_id: *buffer_id as u64,
3452 file: Some(new_file.to_proto()),
3453 })
3454 .log_err();
3455 }
3456 buffer.file_updated(Box::new(new_file), cx).detach();
3457 }
3458 });
3459 } else {
3460 buffers_to_delete.push(*buffer_id);
3461 }
3462 }
3463
3464 for buffer_id in buffers_to_delete {
3465 self.opened_buffers.remove(&buffer_id);
3466 }
3467
3468 for (buffer, old_path) in renamed_buffers {
3469 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3470 self.assign_language_to_buffer(&buffer, cx);
3471 self.register_buffer_with_language_server(&buffer, cx);
3472 }
3473 }
3474
3475 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3476 let new_active_entry = entry.and_then(|project_path| {
3477 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3478 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3479 Some(entry.id)
3480 });
3481 if new_active_entry != self.active_entry {
3482 self.active_entry = new_active_entry;
3483 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3484 }
3485 }
3486
3487 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3488 self.language_server_statuses
3489 .values()
3490 .any(|status| status.pending_diagnostic_updates > 0)
3491 }
3492
3493 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3494 let mut summary = DiagnosticSummary::default();
3495 for (_, path_summary) in self.diagnostic_summaries(cx) {
3496 summary.error_count += path_summary.error_count;
3497 summary.warning_count += path_summary.warning_count;
3498 }
3499 summary
3500 }
3501
3502 pub fn diagnostic_summaries<'a>(
3503 &'a self,
3504 cx: &'a AppContext,
3505 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3506 self.worktrees(cx).flat_map(move |worktree| {
3507 let worktree = worktree.read(cx);
3508 let worktree_id = worktree.id();
3509 worktree
3510 .diagnostic_summaries()
3511 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3512 })
3513 }
3514
3515 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3516 if self
3517 .language_server_statuses
3518 .values()
3519 .map(|status| status.pending_diagnostic_updates)
3520 .sum::<isize>()
3521 == 1
3522 {
3523 cx.emit(Event::DiskBasedDiagnosticsStarted);
3524 }
3525 }
3526
3527 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3528 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3529 if self
3530 .language_server_statuses
3531 .values()
3532 .map(|status| status.pending_diagnostic_updates)
3533 .sum::<isize>()
3534 == 0
3535 {
3536 cx.emit(Event::DiskBasedDiagnosticsFinished);
3537 }
3538 }
3539
3540 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3541 self.active_entry
3542 }
3543
3544 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3545 self.worktree_for_id(path.worktree_id, cx)?
3546 .read(cx)
3547 .entry_for_path(&path.path)
3548 .map(|entry| entry.id)
3549 }
3550
3551 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3552 let worktree = self.worktree_for_entry(entry_id, cx)?;
3553 let worktree = worktree.read(cx);
3554 let worktree_id = worktree.id();
3555 let path = worktree.entry_for_id(entry_id)?.path.clone();
3556 Some(ProjectPath { worktree_id, path })
3557 }
3558
3559 // RPC message handlers
3560
3561 async fn handle_unshare_project(
3562 this: ModelHandle<Self>,
3563 _: TypedEnvelope<proto::UnshareProject>,
3564 _: Arc<Client>,
3565 mut cx: AsyncAppContext,
3566 ) -> Result<()> {
3567 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3568 Ok(())
3569 }
3570
3571 async fn handle_add_collaborator(
3572 this: ModelHandle<Self>,
3573 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3574 _: Arc<Client>,
3575 mut cx: AsyncAppContext,
3576 ) -> Result<()> {
3577 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3578 let collaborator = envelope
3579 .payload
3580 .collaborator
3581 .take()
3582 .ok_or_else(|| anyhow!("empty collaborator"))?;
3583
3584 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3585 this.update(&mut cx, |this, cx| {
3586 this.collaborators
3587 .insert(collaborator.peer_id, collaborator);
3588 cx.notify();
3589 });
3590
3591 Ok(())
3592 }
3593
3594 async fn handle_remove_collaborator(
3595 this: ModelHandle<Self>,
3596 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3597 _: Arc<Client>,
3598 mut cx: AsyncAppContext,
3599 ) -> Result<()> {
3600 this.update(&mut cx, |this, cx| {
3601 let peer_id = PeerId(envelope.payload.peer_id);
3602 let replica_id = this
3603 .collaborators
3604 .remove(&peer_id)
3605 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3606 .replica_id;
3607 for (_, buffer) in &this.opened_buffers {
3608 if let Some(buffer) = buffer.upgrade(cx) {
3609 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3610 }
3611 }
3612 cx.emit(Event::CollaboratorLeft(peer_id));
3613 cx.notify();
3614 Ok(())
3615 })
3616 }
3617
3618 async fn handle_register_worktree(
3619 this: ModelHandle<Self>,
3620 envelope: TypedEnvelope<proto::RegisterWorktree>,
3621 client: Arc<Client>,
3622 mut cx: AsyncAppContext,
3623 ) -> Result<()> {
3624 this.update(&mut cx, |this, cx| {
3625 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3626 let replica_id = this.replica_id();
3627 let worktree = proto::Worktree {
3628 id: envelope.payload.worktree_id,
3629 root_name: envelope.payload.root_name,
3630 entries: Default::default(),
3631 diagnostic_summaries: Default::default(),
3632 visible: envelope.payload.visible,
3633 };
3634 let (worktree, load_task) =
3635 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3636 this.add_worktree(&worktree, cx);
3637 load_task.detach();
3638 Ok(())
3639 })
3640 }
3641
3642 async fn handle_unregister_worktree(
3643 this: ModelHandle<Self>,
3644 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3645 _: Arc<Client>,
3646 mut cx: AsyncAppContext,
3647 ) -> Result<()> {
3648 this.update(&mut cx, |this, cx| {
3649 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3650 this.remove_worktree(worktree_id, cx);
3651 Ok(())
3652 })
3653 }
3654
3655 async fn handle_update_worktree(
3656 this: ModelHandle<Self>,
3657 envelope: TypedEnvelope<proto::UpdateWorktree>,
3658 _: Arc<Client>,
3659 mut cx: AsyncAppContext,
3660 ) -> Result<()> {
3661 this.update(&mut cx, |this, cx| {
3662 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3663 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3664 worktree.update(cx, |worktree, _| {
3665 let worktree = worktree.as_remote_mut().unwrap();
3666 worktree.update_from_remote(envelope)
3667 })?;
3668 }
3669 Ok(())
3670 })
3671 }
3672
3673 async fn handle_update_diagnostic_summary(
3674 this: ModelHandle<Self>,
3675 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3676 _: Arc<Client>,
3677 mut cx: AsyncAppContext,
3678 ) -> Result<()> {
3679 this.update(&mut cx, |this, cx| {
3680 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3681 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3682 if let Some(summary) = envelope.payload.summary {
3683 let project_path = ProjectPath {
3684 worktree_id,
3685 path: Path::new(&summary.path).into(),
3686 };
3687 worktree.update(cx, |worktree, _| {
3688 worktree
3689 .as_remote_mut()
3690 .unwrap()
3691 .update_diagnostic_summary(project_path.path.clone(), &summary);
3692 });
3693 cx.emit(Event::DiagnosticsUpdated(project_path));
3694 }
3695 }
3696 Ok(())
3697 })
3698 }
3699
3700 async fn handle_start_language_server(
3701 this: ModelHandle<Self>,
3702 envelope: TypedEnvelope<proto::StartLanguageServer>,
3703 _: Arc<Client>,
3704 mut cx: AsyncAppContext,
3705 ) -> Result<()> {
3706 let server = envelope
3707 .payload
3708 .server
3709 .ok_or_else(|| anyhow!("invalid server"))?;
3710 this.update(&mut cx, |this, cx| {
3711 this.language_server_statuses.insert(
3712 server.id as usize,
3713 LanguageServerStatus {
3714 name: server.name,
3715 pending_work: Default::default(),
3716 pending_diagnostic_updates: 0,
3717 },
3718 );
3719 cx.notify();
3720 });
3721 Ok(())
3722 }
3723
3724 async fn handle_update_language_server(
3725 this: ModelHandle<Self>,
3726 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3727 _: Arc<Client>,
3728 mut cx: AsyncAppContext,
3729 ) -> Result<()> {
3730 let language_server_id = envelope.payload.language_server_id as usize;
3731 match envelope
3732 .payload
3733 .variant
3734 .ok_or_else(|| anyhow!("invalid variant"))?
3735 {
3736 proto::update_language_server::Variant::WorkStart(payload) => {
3737 this.update(&mut cx, |this, cx| {
3738 this.on_lsp_work_start(language_server_id, payload.token, cx);
3739 })
3740 }
3741 proto::update_language_server::Variant::WorkProgress(payload) => {
3742 this.update(&mut cx, |this, cx| {
3743 this.on_lsp_work_progress(
3744 language_server_id,
3745 payload.token,
3746 LanguageServerProgress {
3747 message: payload.message,
3748 percentage: payload.percentage.map(|p| p as usize),
3749 last_update_at: Instant::now(),
3750 },
3751 cx,
3752 );
3753 })
3754 }
3755 proto::update_language_server::Variant::WorkEnd(payload) => {
3756 this.update(&mut cx, |this, cx| {
3757 this.on_lsp_work_end(language_server_id, payload.token, cx);
3758 })
3759 }
3760 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3761 this.update(&mut cx, |this, cx| {
3762 this.disk_based_diagnostics_started(cx);
3763 })
3764 }
3765 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3766 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3767 }
3768 }
3769
3770 Ok(())
3771 }
3772
3773 async fn handle_update_buffer(
3774 this: ModelHandle<Self>,
3775 envelope: TypedEnvelope<proto::UpdateBuffer>,
3776 _: Arc<Client>,
3777 mut cx: AsyncAppContext,
3778 ) -> Result<()> {
3779 this.update(&mut cx, |this, cx| {
3780 let payload = envelope.payload.clone();
3781 let buffer_id = payload.buffer_id;
3782 let ops = payload
3783 .operations
3784 .into_iter()
3785 .map(|op| language::proto::deserialize_operation(op))
3786 .collect::<Result<Vec<_>, _>>()?;
3787 match this.opened_buffers.entry(buffer_id) {
3788 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3789 OpenBuffer::Strong(buffer) => {
3790 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3791 }
3792 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3793 OpenBuffer::Weak(_) => {}
3794 },
3795 hash_map::Entry::Vacant(e) => {
3796 e.insert(OpenBuffer::Loading(ops));
3797 }
3798 }
3799 Ok(())
3800 })
3801 }
3802
3803 async fn handle_update_buffer_file(
3804 this: ModelHandle<Self>,
3805 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3806 _: Arc<Client>,
3807 mut cx: AsyncAppContext,
3808 ) -> Result<()> {
3809 this.update(&mut cx, |this, cx| {
3810 let payload = envelope.payload.clone();
3811 let buffer_id = payload.buffer_id;
3812 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3813 let worktree = this
3814 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3815 .ok_or_else(|| anyhow!("no such worktree"))?;
3816 let file = File::from_proto(file, worktree.clone(), cx)?;
3817 let buffer = this
3818 .opened_buffers
3819 .get_mut(&buffer_id)
3820 .and_then(|b| b.upgrade(cx))
3821 .ok_or_else(|| anyhow!("no such buffer"))?;
3822 buffer.update(cx, |buffer, cx| {
3823 buffer.file_updated(Box::new(file), cx).detach();
3824 });
3825 Ok(())
3826 })
3827 }
3828
3829 async fn handle_save_buffer(
3830 this: ModelHandle<Self>,
3831 envelope: TypedEnvelope<proto::SaveBuffer>,
3832 _: Arc<Client>,
3833 mut cx: AsyncAppContext,
3834 ) -> Result<proto::BufferSaved> {
3835 let buffer_id = envelope.payload.buffer_id;
3836 let requested_version = deserialize_version(envelope.payload.version);
3837
3838 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3839 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3840 let buffer = this
3841 .opened_buffers
3842 .get(&buffer_id)
3843 .and_then(|buffer| buffer.upgrade(cx))
3844 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3845 Ok::<_, anyhow::Error>((project_id, buffer))
3846 })?;
3847 buffer
3848 .update(&mut cx, |buffer, _| {
3849 buffer.wait_for_version(requested_version)
3850 })
3851 .await;
3852
3853 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3854 Ok(proto::BufferSaved {
3855 project_id,
3856 buffer_id,
3857 version: serialize_version(&saved_version),
3858 mtime: Some(mtime.into()),
3859 })
3860 }
3861
3862 async fn handle_reload_buffers(
3863 this: ModelHandle<Self>,
3864 envelope: TypedEnvelope<proto::ReloadBuffers>,
3865 _: Arc<Client>,
3866 mut cx: AsyncAppContext,
3867 ) -> Result<proto::ReloadBuffersResponse> {
3868 let sender_id = envelope.original_sender_id()?;
3869 let reload = this.update(&mut cx, |this, cx| {
3870 let mut buffers = HashSet::default();
3871 for buffer_id in &envelope.payload.buffer_ids {
3872 buffers.insert(
3873 this.opened_buffers
3874 .get(buffer_id)
3875 .and_then(|buffer| buffer.upgrade(cx))
3876 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3877 );
3878 }
3879 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3880 })?;
3881
3882 let project_transaction = reload.await?;
3883 let project_transaction = this.update(&mut cx, |this, cx| {
3884 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3885 });
3886 Ok(proto::ReloadBuffersResponse {
3887 transaction: Some(project_transaction),
3888 })
3889 }
3890
3891 async fn handle_format_buffers(
3892 this: ModelHandle<Self>,
3893 envelope: TypedEnvelope<proto::FormatBuffers>,
3894 _: Arc<Client>,
3895 mut cx: AsyncAppContext,
3896 ) -> Result<proto::FormatBuffersResponse> {
3897 let sender_id = envelope.original_sender_id()?;
3898 let format = this.update(&mut cx, |this, cx| {
3899 let mut buffers = HashSet::default();
3900 for buffer_id in &envelope.payload.buffer_ids {
3901 buffers.insert(
3902 this.opened_buffers
3903 .get(buffer_id)
3904 .and_then(|buffer| buffer.upgrade(cx))
3905 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3906 );
3907 }
3908 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3909 })?;
3910
3911 let project_transaction = format.await?;
3912 let project_transaction = this.update(&mut cx, |this, cx| {
3913 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3914 });
3915 Ok(proto::FormatBuffersResponse {
3916 transaction: Some(project_transaction),
3917 })
3918 }
3919
3920 async fn handle_get_completions(
3921 this: ModelHandle<Self>,
3922 envelope: TypedEnvelope<proto::GetCompletions>,
3923 _: Arc<Client>,
3924 mut cx: AsyncAppContext,
3925 ) -> Result<proto::GetCompletionsResponse> {
3926 let position = envelope
3927 .payload
3928 .position
3929 .and_then(language::proto::deserialize_anchor)
3930 .ok_or_else(|| anyhow!("invalid position"))?;
3931 let version = deserialize_version(envelope.payload.version);
3932 let buffer = this.read_with(&cx, |this, cx| {
3933 this.opened_buffers
3934 .get(&envelope.payload.buffer_id)
3935 .and_then(|buffer| buffer.upgrade(cx))
3936 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3937 })?;
3938 buffer
3939 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3940 .await;
3941 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3942 let completions = this
3943 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3944 .await?;
3945
3946 Ok(proto::GetCompletionsResponse {
3947 completions: completions
3948 .iter()
3949 .map(language::proto::serialize_completion)
3950 .collect(),
3951 version: serialize_version(&version),
3952 })
3953 }
3954
3955 async fn handle_apply_additional_edits_for_completion(
3956 this: ModelHandle<Self>,
3957 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3958 _: Arc<Client>,
3959 mut cx: AsyncAppContext,
3960 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3961 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3962 let buffer = this
3963 .opened_buffers
3964 .get(&envelope.payload.buffer_id)
3965 .and_then(|buffer| buffer.upgrade(cx))
3966 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3967 let language = buffer.read(cx).language();
3968 let completion = language::proto::deserialize_completion(
3969 envelope
3970 .payload
3971 .completion
3972 .ok_or_else(|| anyhow!("invalid completion"))?,
3973 language,
3974 )?;
3975 Ok::<_, anyhow::Error>(
3976 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3977 )
3978 })?;
3979
3980 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3981 transaction: apply_additional_edits
3982 .await?
3983 .as_ref()
3984 .map(language::proto::serialize_transaction),
3985 })
3986 }
3987
3988 async fn handle_get_code_actions(
3989 this: ModelHandle<Self>,
3990 envelope: TypedEnvelope<proto::GetCodeActions>,
3991 _: Arc<Client>,
3992 mut cx: AsyncAppContext,
3993 ) -> Result<proto::GetCodeActionsResponse> {
3994 let start = envelope
3995 .payload
3996 .start
3997 .and_then(language::proto::deserialize_anchor)
3998 .ok_or_else(|| anyhow!("invalid start"))?;
3999 let end = envelope
4000 .payload
4001 .end
4002 .and_then(language::proto::deserialize_anchor)
4003 .ok_or_else(|| anyhow!("invalid end"))?;
4004 let buffer = this.update(&mut cx, |this, cx| {
4005 this.opened_buffers
4006 .get(&envelope.payload.buffer_id)
4007 .and_then(|buffer| buffer.upgrade(cx))
4008 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4009 })?;
4010 buffer
4011 .update(&mut cx, |buffer, _| {
4012 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4013 })
4014 .await;
4015
4016 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4017 let code_actions = this.update(&mut cx, |this, cx| {
4018 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4019 })?;
4020
4021 Ok(proto::GetCodeActionsResponse {
4022 actions: code_actions
4023 .await?
4024 .iter()
4025 .map(language::proto::serialize_code_action)
4026 .collect(),
4027 version: serialize_version(&version),
4028 })
4029 }
4030
4031 async fn handle_apply_code_action(
4032 this: ModelHandle<Self>,
4033 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4034 _: Arc<Client>,
4035 mut cx: AsyncAppContext,
4036 ) -> Result<proto::ApplyCodeActionResponse> {
4037 let sender_id = envelope.original_sender_id()?;
4038 let action = language::proto::deserialize_code_action(
4039 envelope
4040 .payload
4041 .action
4042 .ok_or_else(|| anyhow!("invalid action"))?,
4043 )?;
4044 let apply_code_action = this.update(&mut cx, |this, cx| {
4045 let buffer = this
4046 .opened_buffers
4047 .get(&envelope.payload.buffer_id)
4048 .and_then(|buffer| buffer.upgrade(cx))
4049 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4050 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4051 })?;
4052
4053 let project_transaction = apply_code_action.await?;
4054 let project_transaction = this.update(&mut cx, |this, cx| {
4055 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4056 });
4057 Ok(proto::ApplyCodeActionResponse {
4058 transaction: Some(project_transaction),
4059 })
4060 }
4061
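    // Generic handler for buffer-scoped LSP requests: it resolves the target buffer,
    // deserializes the request, runs it via `request_lsp`, and serializes the response
    // for the original sender.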
4062 async fn handle_lsp_command<T: LspCommand>(
4063 this: ModelHandle<Self>,
4064 envelope: TypedEnvelope<T::ProtoRequest>,
4065 _: Arc<Client>,
4066 mut cx: AsyncAppContext,
4067 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4068 where
4069 <T::LspRequest as lsp::request::Request>::Result: Send,
4070 {
4071 let sender_id = envelope.original_sender_id()?;
4072 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4073 let buffer_handle = this.read_with(&cx, |this, _| {
4074 this.opened_buffers
4075 .get(&buffer_id)
4076 .and_then(|buffer| buffer.upgrade(&cx))
4077 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4078 })?;
4079 let request = T::from_proto(
4080 envelope.payload,
4081 this.clone(),
4082 buffer_handle.clone(),
4083 cx.clone(),
4084 )
4085 .await?;
4086 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4087 let response = this
4088 .update(&mut cx, |this, cx| {
4089 this.request_lsp(buffer_handle, request, cx)
4090 })
4091 .await?;
4092 this.update(&mut cx, |this, cx| {
4093 Ok(T::response_to_proto(
4094 response,
4095 this,
4096 sender_id,
4097 &buffer_version,
4098 cx,
4099 ))
4100 })
4101 }
4102
4103 async fn handle_get_project_symbols(
4104 this: ModelHandle<Self>,
4105 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4106 _: Arc<Client>,
4107 mut cx: AsyncAppContext,
4108 ) -> Result<proto::GetProjectSymbolsResponse> {
4109 let symbols = this
4110 .update(&mut cx, |this, cx| {
4111 this.symbols(&envelope.payload.query, cx)
4112 })
4113 .await?;
4114
4115 Ok(proto::GetProjectSymbolsResponse {
4116 symbols: symbols.iter().map(serialize_symbol).collect(),
4117 })
4118 }
4119
4120 async fn handle_search_project(
4121 this: ModelHandle<Self>,
4122 envelope: TypedEnvelope<proto::SearchProject>,
4123 _: Arc<Client>,
4124 mut cx: AsyncAppContext,
4125 ) -> Result<proto::SearchProjectResponse> {
4126 let peer_id = envelope.original_sender_id()?;
4127 let query = SearchQuery::from_proto(envelope.payload)?;
4128 let result = this
4129 .update(&mut cx, |this, cx| this.search(query, cx))
4130 .await?;
4131
4132 this.update(&mut cx, |this, cx| {
4133 let mut locations = Vec::new();
4134 for (buffer, ranges) in result {
4135 for range in ranges {
4136 let start = serialize_anchor(&range.start);
4137 let end = serialize_anchor(&range.end);
4138 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4139 locations.push(proto::Location {
4140 buffer: Some(buffer),
4141 start: Some(start),
4142 end: Some(end),
4143 });
4144 }
4145 }
4146 Ok(proto::SearchProjectResponse { locations })
4147 })
4148 }
4149
4150 async fn handle_open_buffer_for_symbol(
4151 this: ModelHandle<Self>,
4152 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4153 _: Arc<Client>,
4154 mut cx: AsyncAppContext,
4155 ) -> Result<proto::OpenBufferForSymbolResponse> {
4156 let peer_id = envelope.original_sender_id()?;
4157 let symbol = envelope
4158 .payload
4159 .symbol
4160 .ok_or_else(|| anyhow!("invalid symbol"))?;
4161 let symbol = this.read_with(&cx, |this, _| {
4162 let symbol = this.deserialize_symbol(symbol)?;
4163 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4164 if signature == symbol.signature {
4165 Ok(symbol)
4166 } else {
4167 Err(anyhow!("invalid symbol signature"))
4168 }
4169 })?;
4170 let buffer = this
4171 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4172 .await?;
4173
4174 Ok(proto::OpenBufferForSymbolResponse {
4175 buffer: Some(this.update(&mut cx, |this, cx| {
4176 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4177 })),
4178 })
4179 }
4180
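    // Produces a keyed hash of a worktree id and path using this project's random
    // nonce. Guests must send back a matching signature (checked in
    // `handle_open_buffer_for_symbol`) before a buffer is opened for a symbol, so they
    // can only request paths for symbols this project previously handed out.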
4181 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4182 let mut hasher = Sha256::new();
4183 hasher.update(worktree_id.to_proto().to_be_bytes());
4184 hasher.update(path.to_string_lossy().as_bytes());
4185 hasher.update(self.nonce.to_be_bytes());
4186 hasher.finalize().as_slice().try_into().unwrap()
4187 }
4188
4189 async fn handle_open_buffer_by_id(
4190 this: ModelHandle<Self>,
4191 envelope: TypedEnvelope<proto::OpenBufferById>,
4192 _: Arc<Client>,
4193 mut cx: AsyncAppContext,
4194 ) -> Result<proto::OpenBufferResponse> {
4195 let peer_id = envelope.original_sender_id()?;
4196 let buffer = this
4197 .update(&mut cx, |this, cx| {
4198 this.open_buffer_by_id(envelope.payload.id, cx)
4199 })
4200 .await?;
4201 this.update(&mut cx, |this, cx| {
4202 Ok(proto::OpenBufferResponse {
4203 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4204 })
4205 })
4206 }
4207
4208 async fn handle_open_buffer_by_path(
4209 this: ModelHandle<Self>,
4210 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4211 _: Arc<Client>,
4212 mut cx: AsyncAppContext,
4213 ) -> Result<proto::OpenBufferResponse> {
4214 let peer_id = envelope.original_sender_id()?;
4215 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4216 let open_buffer = this.update(&mut cx, |this, cx| {
4217 this.open_buffer(
4218 ProjectPath {
4219 worktree_id,
4220 path: PathBuf::from(envelope.payload.path).into(),
4221 },
4222 cx,
4223 )
4224 });
4225
4226 let buffer = open_buffer.await?;
4227 this.update(&mut cx, |this, cx| {
4228 Ok(proto::OpenBufferResponse {
4229 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4230 })
4231 })
4232 }
4233
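    // Converts a project-wide transaction into its wire form, serializing each affected
    // buffer for the given peer alongside the corresponding buffer transaction.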
4234 fn serialize_project_transaction_for_peer(
4235 &mut self,
4236 project_transaction: ProjectTransaction,
4237 peer_id: PeerId,
4238 cx: &AppContext,
4239 ) -> proto::ProjectTransaction {
4240 let mut serialized_transaction = proto::ProjectTransaction {
4241 buffers: Default::default(),
4242 transactions: Default::default(),
4243 };
4244 for (buffer, transaction) in project_transaction.0 {
4245 serialized_transaction
4246 .buffers
4247 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4248 serialized_transaction
4249 .transactions
4250 .push(language::proto::serialize_transaction(&transaction));
4251 }
4252 serialized_transaction
4253 }
4254
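    // Reconstructs a project transaction received from a peer: each buffer is resolved
    // locally, the referenced edits are awaited, and the transaction is optionally
    // pushed onto the buffer's undo history.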
4255 fn deserialize_project_transaction(
4256 &mut self,
4257 message: proto::ProjectTransaction,
4258 push_to_history: bool,
4259 cx: &mut ModelContext<Self>,
4260 ) -> Task<Result<ProjectTransaction>> {
4261 cx.spawn(|this, mut cx| async move {
4262 let mut project_transaction = ProjectTransaction::default();
4263 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4264 let buffer = this
4265 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4266 .await?;
4267 let transaction = language::proto::deserialize_transaction(transaction)?;
4268 project_transaction.0.insert(buffer, transaction);
4269 }
4270
4271 for (buffer, transaction) in &project_transaction.0 {
4272 buffer
4273 .update(&mut cx, |buffer, _| {
4274 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4275 })
4276 .await;
4277
4278 if push_to_history {
4279 buffer.update(&mut cx, |buffer, _| {
4280 buffer.push_transaction(transaction.clone(), Instant::now());
4281 });
4282 }
4283 }
4284
4285 Ok(project_transaction)
4286 })
4287 }
4288
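    // The first time a buffer is sent to a given peer its full state is included;
    // afterwards only the buffer id is sent.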
4289 fn serialize_buffer_for_peer(
4290 &mut self,
4291 buffer: &ModelHandle<Buffer>,
4292 peer_id: PeerId,
4293 cx: &AppContext,
4294 ) -> proto::Buffer {
4295 let buffer_id = buffer.read(cx).remote_id();
4296 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4297 if shared_buffers.insert(buffer_id) {
4298 proto::Buffer {
4299 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4300 }
4301 } else {
4302 proto::Buffer {
4303 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4304 }
4305 }
4306 }
4307
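    // An incoming buffer is either a bare id, in which case we wait until a buffer with
    // that id has been opened locally, or a full state from which a new buffer is
    // constructed and registered with the project.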
4308 fn deserialize_buffer(
4309 &mut self,
4310 buffer: proto::Buffer,
4311 cx: &mut ModelContext<Self>,
4312 ) -> Task<Result<ModelHandle<Buffer>>> {
4313 let replica_id = self.replica_id();
4314
4315 let opened_buffer_tx = self.opened_buffer.0.clone();
4316 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4317 cx.spawn(|this, mut cx| async move {
4318 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4319 proto::buffer::Variant::Id(id) => {
4320 let buffer = loop {
4321 let buffer = this.read_with(&cx, |this, cx| {
4322 this.opened_buffers
4323 .get(&id)
4324 .and_then(|buffer| buffer.upgrade(cx))
4325 });
4326 if let Some(buffer) = buffer {
4327 break buffer;
4328 }
4329 opened_buffer_rx
4330 .next()
4331 .await
4332 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4333 };
4334 Ok(buffer)
4335 }
4336 proto::buffer::Variant::State(mut buffer) => {
4337 let mut buffer_worktree = None;
4338 let mut buffer_file = None;
4339 if let Some(file) = buffer.file.take() {
4340 this.read_with(&cx, |this, cx| {
4341 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4342 let worktree =
4343 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4344 anyhow!("no worktree found for id {}", file.worktree_id)
4345 })?;
4346 buffer_file =
4347 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4348 as Box<dyn language::File>);
4349 buffer_worktree = Some(worktree);
4350 Ok::<_, anyhow::Error>(())
4351 })?;
4352 }
4353
4354 let buffer = cx.add_model(|cx| {
4355 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4356 });
4357
4358 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4359
4360 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4361 Ok(buffer)
4362 }
4363 }
4364 })
4365 }
4366
4367 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4368 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4369 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4370 let start = serialized_symbol
4371 .start
4372 .ok_or_else(|| anyhow!("invalid start"))?;
4373 let end = serialized_symbol
4374 .end
4375 .ok_or_else(|| anyhow!("invalid end"))?;
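        // Note: assumes the serialized kind is a valid discriminant of the symbol-kind
        // enum used on this side of the protocol; an out-of-range value would be UB.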
4376 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4377 let path = PathBuf::from(serialized_symbol.path);
4378 let language = self.languages.select_language(&path);
4379 Ok(Symbol {
4380 source_worktree_id,
4381 worktree_id,
4382 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4383 label: language
4384 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4385 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4386 name: serialized_symbol.name,
4387 path,
4388 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4389 kind,
4390 signature: serialized_symbol
4391 .signature
4392 .try_into()
4393 .map_err(|_| anyhow!("invalid signature"))?,
4394 })
4395 }
4396
4397 async fn handle_buffer_saved(
4398 this: ModelHandle<Self>,
4399 envelope: TypedEnvelope<proto::BufferSaved>,
4400 _: Arc<Client>,
4401 mut cx: AsyncAppContext,
4402 ) -> Result<()> {
4403 let version = deserialize_version(envelope.payload.version);
4404 let mtime = envelope
4405 .payload
4406 .mtime
4407 .ok_or_else(|| anyhow!("missing mtime"))?
4408 .into();
4409
4410 this.update(&mut cx, |this, cx| {
4411 let buffer = this
4412 .opened_buffers
4413 .get(&envelope.payload.buffer_id)
4414 .and_then(|buffer| buffer.upgrade(cx));
4415 if let Some(buffer) = buffer {
4416 buffer.update(cx, |buffer, cx| {
4417 buffer.did_save(version, mtime, None, cx);
4418 });
4419 }
4420 Ok(())
4421 })
4422 }
4423
4424 async fn handle_buffer_reloaded(
4425 this: ModelHandle<Self>,
4426 envelope: TypedEnvelope<proto::BufferReloaded>,
4427 _: Arc<Client>,
4428 mut cx: AsyncAppContext,
4429 ) -> Result<()> {
4430 let payload = envelope.payload.clone();
4431 let version = deserialize_version(payload.version);
4432 let mtime = payload
4433 .mtime
4434 .ok_or_else(|| anyhow!("missing mtime"))?
4435 .into();
4436 this.update(&mut cx, |this, cx| {
4437 let buffer = this
4438 .opened_buffers
4439 .get(&payload.buffer_id)
4440 .and_then(|buffer| buffer.upgrade(cx));
4441 if let Some(buffer) = buffer {
4442 buffer.update(cx, |buffer, cx| {
4443 buffer.did_reload(version, mtime, cx);
4444 });
4445 }
4446 Ok(())
4447 })
4448 }
4449
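    // Fuzzy-matches the query against file paths in all visible worktrees. Matching
    // runs on the background executor and yields at most `max_results` matches.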
4450 pub fn match_paths<'a>(
4451 &self,
4452 query: &'a str,
4453 include_ignored: bool,
4454 smart_case: bool,
4455 max_results: usize,
4456 cancel_flag: &'a AtomicBool,
4457 cx: &AppContext,
4458 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4459 let worktrees = self
4460 .worktrees(cx)
4461 .filter(|worktree| worktree.read(cx).is_visible())
4462 .collect::<Vec<_>>();
4463 let include_root_name = worktrees.len() > 1;
4464 let candidate_sets = worktrees
4465 .into_iter()
4466 .map(|worktree| CandidateSet {
4467 snapshot: worktree.read(cx).snapshot(),
4468 include_ignored,
4469 include_root_name,
4470 })
4471 .collect::<Vec<_>>();
4472
4473 let background = cx.background().clone();
4474 async move {
4475 fuzzy::match_paths(
4476 candidate_sets.as_slice(),
4477 query,
4478 smart_case,
4479 max_results,
4480 cancel_flag,
4481 background,
4482 )
4483 .await
4484 }
4485 }
4486
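    // Converts LSP text edits into anchor ranges and replacement strings against the
    // snapshot the language server was editing, so they can still be applied if the
    // buffer has changed since. Multi-line replacements are diffed against the old text
    // so that anchors in unchanged regions keep their positions.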
4487 fn edits_from_lsp(
4488 &mut self,
4489 buffer: &ModelHandle<Buffer>,
4490 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4491 version: Option<i32>,
4492 cx: &mut ModelContext<Self>,
4493 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4494 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4495 cx.background().spawn(async move {
4496 let snapshot = snapshot?;
4497 let mut lsp_edits = lsp_edits
4498 .into_iter()
4499 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4500 .peekable();
4501
4502 let mut edits = Vec::new();
4503 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4504 // Combine any LSP edits that are adjacent.
4505 //
4506 // Also, combine LSP edits that are separated from each other by only
4507 // a newline. This is important because for some code actions,
4508 // Rust-analyzer rewrites the entire buffer via a series of edits that
4509 // are separated by unchanged newline characters.
4510 //
4511 // In order for the diffing logic below to work properly, any edits that
4512 // cancel each other out must be combined into one.
4513 while let Some((next_range, next_text)) = lsp_edits.peek() {
4514 if next_range.start > range.end {
4515 if next_range.start.row > range.end.row + 1
4516 || next_range.start.column > 0
4517 || snapshot.clip_point_utf16(
4518 PointUtf16::new(range.end.row, u32::MAX),
4519 Bias::Left,
4520 ) > range.end
4521 {
4522 break;
4523 }
4524 new_text.push('\n');
4525 }
4526 range.end = next_range.end;
4527 new_text.push_str(&next_text);
4528 lsp_edits.next();
4529 }
4530
4531 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4532 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4533 {
4534 return Err(anyhow!("invalid edits received from language server"));
4535 }
4536
4537 // For multiline edits, perform a diff of the old and new text so that
4538 // we can identify the changes more precisely, preserving the locations
4539 // of any anchors positioned in the unchanged regions.
4540 if range.end.row > range.start.row {
4541 let mut offset = range.start.to_offset(&snapshot);
4542 let old_text = snapshot.text_for_range(range).collect::<String>();
4543
4544 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4545 let mut moved_since_edit = true;
4546 for change in diff.iter_all_changes() {
4547 let tag = change.tag();
4548 let value = change.value();
4549 match tag {
4550 ChangeTag::Equal => {
4551 offset += value.len();
4552 moved_since_edit = true;
4553 }
4554 ChangeTag::Delete => {
4555 let start = snapshot.anchor_after(offset);
4556 let end = snapshot.anchor_before(offset + value.len());
4557 if moved_since_edit {
4558 edits.push((start..end, String::new()));
4559 } else {
4560 edits.last_mut().unwrap().0.end = end;
4561 }
4562 offset += value.len();
4563 moved_since_edit = false;
4564 }
4565 ChangeTag::Insert => {
4566 if moved_since_edit {
4567 let anchor = snapshot.anchor_after(offset);
4568 edits.push((anchor.clone()..anchor, value.to_string()));
4569 } else {
4570 edits.last_mut().unwrap().1.push_str(value);
4571 }
4572 moved_since_edit = false;
4573 }
4574 }
4575 }
4576 } else if range.end == range.start {
4577 let anchor = snapshot.anchor_after(range.start);
4578 edits.push((anchor.clone()..anchor, new_text));
4579 } else {
4580 let edit_start = snapshot.anchor_after(range.start);
4581 let edit_end = snapshot.anchor_before(range.end);
4582 edits.push((edit_start..edit_end, new_text));
4583 }
4584 }
4585
4586 Ok(edits)
4587 })
4588 }
4589
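    // Returns the buffer snapshot matching the document version the language server
    // referenced, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old.
    // Without a version, the buffer's current text snapshot is used.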
4590 fn buffer_snapshot_for_lsp_version(
4591 &mut self,
4592 buffer: &ModelHandle<Buffer>,
4593 version: Option<i32>,
4594 cx: &AppContext,
4595 ) -> Result<TextBufferSnapshot> {
4596 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4597
4598 if let Some(version) = version {
4599 let buffer_id = buffer.read(cx).remote_id();
4600 let snapshots = self
4601 .buffer_snapshots
4602 .get_mut(&buffer_id)
4603 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4604 let mut found_snapshot = None;
4605 snapshots.retain(|(snapshot_version, snapshot)| {
4606 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4607 false
4608 } else {
4609 if *snapshot_version == version {
4610 found_snapshot = Some(snapshot.clone());
4611 }
4612 true
4613 }
4614 });
4615
4616 found_snapshot.ok_or_else(|| {
4617 anyhow!(
4618 "snapshot not found for buffer {} at version {}",
4619 buffer_id,
4620 version
4621 )
4622 })
4623 } else {
            Ok(buffer.read(cx).text_snapshot())
4625 }
4626 }
4627
4628 fn language_server_for_buffer(
4629 &self,
4630 buffer: &Buffer,
4631 cx: &AppContext,
4632 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4633 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4634 let worktree_id = file.worktree_id(cx);
4635 self.language_servers
4636 .get(&(worktree_id, language.lsp_adapter()?.name()))
4637 } else {
4638 None
4639 }
4640 }
4641}
4642
4643impl WorktreeHandle {
4644 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4645 match self {
4646 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4647 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4648 }
4649 }
4650}
4651
4652impl OpenBuffer {
4653 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4654 match self {
4655 OpenBuffer::Strong(handle) => Some(handle.clone()),
4656 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4657 OpenBuffer::Loading(_) => None,
4658 }
4659 }
4660}
4661
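// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface,
// optionally including ignored files and prefixing candidates with the worktree's root
// name when multiple worktrees are searched.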
4662struct CandidateSet {
4663 snapshot: Snapshot,
4664 include_ignored: bool,
4665 include_root_name: bool,
4666}
4667
4668impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4669 type Candidates = CandidateSetIter<'a>;
4670
4671 fn id(&self) -> usize {
4672 self.snapshot.id().to_usize()
4673 }
4674
4675 fn len(&self) -> usize {
4676 if self.include_ignored {
4677 self.snapshot.file_count()
4678 } else {
4679 self.snapshot.visible_file_count()
4680 }
4681 }
4682
4683 fn prefix(&self) -> Arc<str> {
4684 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4685 self.snapshot.root_name().into()
4686 } else if self.include_root_name {
4687 format!("{}/", self.snapshot.root_name()).into()
4688 } else {
4689 "".into()
4690 }
4691 }
4692
4693 fn candidates(&'a self, start: usize) -> Self::Candidates {
4694 CandidateSetIter {
4695 traversal: self.snapshot.files(self.include_ignored, start),
4696 }
4697 }
4698}
4699
4700struct CandidateSetIter<'a> {
4701 traversal: Traversal<'a>,
4702}
4703
4704impl<'a> Iterator for CandidateSetIter<'a> {
4705 type Item = PathMatchCandidate<'a>;
4706
4707 fn next(&mut self) -> Option<Self::Item> {
4708 self.traversal.next().map(|entry| {
4709 if let EntryKind::File(char_bag) = entry.kind {
4710 PathMatchCandidate {
4711 path: &entry.path,
4712 char_bag,
4713 }
4714 } else {
4715 unreachable!()
4716 }
4717 })
4718 }
4719}
4720
4721impl Entity for Project {
4722 type Event = Event;
4723
4724 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4725 match &self.client_state {
4726 ProjectClientState::Local { remote_id_rx, .. } => {
4727 if let Some(project_id) = *remote_id_rx.borrow() {
4728 self.client
4729 .send(proto::UnregisterProject { project_id })
4730 .log_err();
4731 }
4732 }
4733 ProjectClientState::Remote { remote_id, .. } => {
4734 self.client
4735 .send(proto::LeaveProject {
4736 project_id: *remote_id,
4737 })
4738 .log_err();
4739 }
4740 }
4741 }
4742
4743 fn app_will_quit(
4744 &mut self,
4745 _: &mut MutableAppContext,
4746 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4747 let shutdown_futures = self
4748 .language_servers
4749 .drain()
4750 .filter_map(|(_, (_, server))| server.shutdown())
4751 .collect::<Vec<_>>();
4752 Some(
4753 async move {
4754 futures::future::join_all(shutdown_futures).await;
4755 }
4756 .boxed(),
4757 )
4758 }
4759}
4760
4761impl Collaborator {
4762 fn from_proto(
4763 message: proto::Collaborator,
4764 user_store: &ModelHandle<UserStore>,
4765 cx: &mut AsyncAppContext,
4766 ) -> impl Future<Output = Result<Self>> {
4767 let user = user_store.update(cx, |user_store, cx| {
4768 user_store.fetch_user(message.user_id, cx)
4769 });
4770
4771 async move {
4772 Ok(Self {
4773 peer_id: PeerId(message.peer_id),
4774 user: user.await?,
4775 replica_id: message.replica_id as ReplicaId,
4776 })
4777 }
4778 }
4779}
4780
4781impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4782 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4783 Self {
4784 worktree_id,
4785 path: path.as_ref().into(),
4786 }
4787 }
4788}
4789
4790impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4791 fn from(options: lsp::CreateFileOptions) -> Self {
4792 Self {
4793 overwrite: options.overwrite.unwrap_or(false),
4794 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4795 }
4796 }
4797}
4798
4799impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4800 fn from(options: lsp::RenameFileOptions) -> Self {
4801 Self {
4802 overwrite: options.overwrite.unwrap_or(false),
4803 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4804 }
4805 }
4806}
4807
4808impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4809 fn from(options: lsp::DeleteFileOptions) -> Self {
4810 Self {
4811 recursive: options.recursive.unwrap_or(false),
4812 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4813 }
4814 }
4815}
4816
4817fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4818 proto::Symbol {
4819 source_worktree_id: symbol.source_worktree_id.to_proto(),
4820 worktree_id: symbol.worktree_id.to_proto(),
4821 language_server_name: symbol.language_server_name.0.to_string(),
4822 name: symbol.name.clone(),
4823 kind: unsafe { mem::transmute(symbol.kind) },
4824 path: symbol.path.to_string_lossy().to_string(),
4825 start: Some(proto::Point {
4826 row: symbol.range.start.row,
4827 column: symbol.range.start.column,
4828 }),
4829 end: Some(proto::Point {
4830 row: symbol.range.end.row,
4831 column: symbol.range.end.column,
4832 }),
4833 signature: symbol.signature.to_vec(),
4834 }
4835}
4836
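// Computes `path` relative to `base`, walking the shared prefix and inserting `..`
// components where the two diverge. For example, relativizing "/a/c/d" against "/a/b"
// yields "../c/d".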
4837fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4838 let mut path_components = path.components();
4839 let mut base_components = base.components();
4840 let mut components: Vec<Component> = Vec::new();
4841 loop {
4842 match (path_components.next(), base_components.next()) {
4843 (None, None) => break,
4844 (Some(a), None) => {
4845 components.push(a);
4846 components.extend(path_components.by_ref());
4847 break;
4848 }
4849 (None, _) => components.push(Component::ParentDir),
4850 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4851 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4852 (Some(a), Some(_)) => {
4853 components.push(Component::ParentDir);
4854 for _ in base_components {
4855 components.push(Component::ParentDir);
4856 }
4857 components.push(a);
4858 components.extend(path_components.by_ref());
4859 break;
4860 }
4861 }
4862 }
4863 components.iter().map(|c| c.as_os_str()).collect()
4864}
4865
4866impl Item for Buffer {
4867 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4868 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4869 }
4870}
4871
4872#[cfg(test)]
4873mod tests {
4874 use super::{Event, *};
4875 use fs::RealFs;
4876 use futures::{future, StreamExt};
4877 use gpui::test::subscribe;
4878 use language::{
4879 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4880 ToPoint,
4881 };
4882 use lsp::Url;
4883 use serde_json::json;
4884 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
4885 use unindent::Unindent as _;
4886 use util::{assert_set_eq, test::temp_tree};
4887 use worktree::WorktreeHandle as _;
4888
4889 #[gpui::test]
4890 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4891 let dir = temp_tree(json!({
4892 "root": {
4893 "apple": "",
4894 "banana": {
4895 "carrot": {
4896 "date": "",
4897 "endive": "",
4898 }
4899 },
4900 "fennel": {
4901 "grape": "",
4902 }
4903 }
4904 }));
4905
4906 let root_link_path = dir.path().join("root_link");
4907 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4908 unix::fs::symlink(
4909 &dir.path().join("root/fennel"),
4910 &dir.path().join("root/finnochio"),
4911 )
4912 .unwrap();
4913
4914 let project = Project::test(Arc::new(RealFs), cx);
4915
4916 let (tree, _) = project
4917 .update(cx, |project, cx| {
4918 project.find_or_create_local_worktree(&root_link_path, true, cx)
4919 })
4920 .await
4921 .unwrap();
4922
4923 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4924 .await;
4925 cx.read(|cx| {
4926 let tree = tree.read(cx);
4927 assert_eq!(tree.file_count(), 5);
4928 assert_eq!(
4929 tree.inode_for_path("fennel/grape"),
4930 tree.inode_for_path("finnochio/grape")
4931 );
4932 });
4933
4934 let cancel_flag = Default::default();
4935 let results = project
4936 .read_with(cx, |project, cx| {
4937 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4938 })
4939 .await;
4940 assert_eq!(
4941 results
4942 .into_iter()
4943 .map(|result| result.path)
4944 .collect::<Vec<Arc<Path>>>(),
4945 vec![
4946 PathBuf::from("banana/carrot/date").into(),
4947 PathBuf::from("banana/carrot/endive").into(),
4948 ]
4949 );
4950 }
4951
4952 #[gpui::test]
4953 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4954 cx.foreground().forbid_parking();
4955
4956 let mut rust_language = Language::new(
4957 LanguageConfig {
4958 name: "Rust".into(),
4959 path_suffixes: vec!["rs".to_string()],
4960 ..Default::default()
4961 },
4962 Some(tree_sitter_rust::language()),
4963 );
4964 let mut json_language = Language::new(
4965 LanguageConfig {
4966 name: "JSON".into(),
4967 path_suffixes: vec!["json".to_string()],
4968 ..Default::default()
4969 },
4970 None,
4971 );
4972 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4973 name: "the-rust-language-server",
4974 capabilities: lsp::ServerCapabilities {
4975 completion_provider: Some(lsp::CompletionOptions {
4976 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4977 ..Default::default()
4978 }),
4979 ..Default::default()
4980 },
4981 ..Default::default()
4982 });
4983 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4984 name: "the-json-language-server",
4985 capabilities: lsp::ServerCapabilities {
4986 completion_provider: Some(lsp::CompletionOptions {
4987 trigger_characters: Some(vec![":".to_string()]),
4988 ..Default::default()
4989 }),
4990 ..Default::default()
4991 },
4992 ..Default::default()
4993 });
4994
4995 let fs = FakeFs::new(cx.background());
4996 fs.insert_tree(
4997 "/the-root",
4998 json!({
4999 "test.rs": "const A: i32 = 1;",
5000 "test2.rs": "",
5001 "Cargo.toml": "a = 1",
5002 "package.json": "{\"a\": 1}",
5003 }),
5004 )
5005 .await;
5006
5007 let project = Project::test(fs.clone(), cx);
5008 project.update(cx, |project, _| {
5009 project.languages.add(Arc::new(rust_language));
5010 project.languages.add(Arc::new(json_language));
5011 });
5012
5013 let worktree_id = project
5014 .update(cx, |project, cx| {
5015 project.find_or_create_local_worktree("/the-root", true, cx)
5016 })
5017 .await
5018 .unwrap()
5019 .0
5020 .read_with(cx, |tree, _| tree.id());
5021
5022 // Open a buffer without an associated language server.
5023 let toml_buffer = project
5024 .update(cx, |project, cx| {
5025 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5026 })
5027 .await
5028 .unwrap();
5029
5030 // Open a buffer with an associated language server.
5031 let rust_buffer = project
5032 .update(cx, |project, cx| {
5033 project.open_buffer((worktree_id, "test.rs"), cx)
5034 })
5035 .await
5036 .unwrap();
5037
5038 // A server is started up, and it is notified about Rust files.
5039 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5040 assert_eq!(
5041 fake_rust_server
5042 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5043 .await
5044 .text_document,
5045 lsp::TextDocumentItem {
5046 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5047 version: 0,
5048 text: "const A: i32 = 1;".to_string(),
5049 language_id: Default::default()
5050 }
5051 );
5052
5053 // The buffer is configured based on the language server's capabilities.
5054 rust_buffer.read_with(cx, |buffer, _| {
5055 assert_eq!(
5056 buffer.completion_triggers(),
5057 &[".".to_string(), "::".to_string()]
5058 );
5059 });
5060 toml_buffer.read_with(cx, |buffer, _| {
5061 assert!(buffer.completion_triggers().is_empty());
5062 });
5063
5064 // Edit a buffer. The changes are reported to the language server.
5065 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5066 assert_eq!(
5067 fake_rust_server
5068 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5069 .await
5070 .text_document,
5071 lsp::VersionedTextDocumentIdentifier::new(
5072 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5073 1
5074 )
5075 );
5076
5077 // Open a third buffer with a different associated language server.
5078 let json_buffer = project
5079 .update(cx, |project, cx| {
5080 project.open_buffer((worktree_id, "package.json"), cx)
5081 })
5082 .await
5083 .unwrap();
5084
        // A JSON language server is started up and is notified only about the JSON buffer.
5086 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5087 assert_eq!(
5088 fake_json_server
5089 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5090 .await
5091 .text_document,
5092 lsp::TextDocumentItem {
5093 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5094 version: 0,
5095 text: "{\"a\": 1}".to_string(),
5096 language_id: Default::default()
5097 }
5098 );
5099
5100 // This buffer is configured based on the second language server's
5101 // capabilities.
5102 json_buffer.read_with(cx, |buffer, _| {
5103 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5104 });
5105
5106 // When opening another buffer whose language server is already running,
5107 // it is also configured based on the existing language server's capabilities.
5108 let rust_buffer2 = project
5109 .update(cx, |project, cx| {
5110 project.open_buffer((worktree_id, "test2.rs"), cx)
5111 })
5112 .await
5113 .unwrap();
5114 rust_buffer2.read_with(cx, |buffer, _| {
5115 assert_eq!(
5116 buffer.completion_triggers(),
5117 &[".".to_string(), "::".to_string()]
5118 );
5119 });
5120
5121 // Changes are reported only to servers matching the buffer's language.
5122 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5124 assert_eq!(
5125 fake_rust_server
5126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5127 .await
5128 .text_document,
5129 lsp::VersionedTextDocumentIdentifier::new(
5130 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5131 1
5132 )
5133 );
5134
5135 // Save notifications are reported to all servers.
5136 toml_buffer
5137 .update(cx, |buffer, cx| buffer.save(cx))
5138 .await
5139 .unwrap();
5140 assert_eq!(
5141 fake_rust_server
5142 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5143 .await
5144 .text_document,
5145 lsp::TextDocumentIdentifier::new(
5146 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5147 )
5148 );
5149 assert_eq!(
5150 fake_json_server
5151 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5152 .await
5153 .text_document,
5154 lsp::TextDocumentIdentifier::new(
5155 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5156 )
5157 );
5158
5159 // Renames are reported only to servers matching the buffer's language.
5160 fs.rename(
5161 Path::new("/the-root/test2.rs"),
5162 Path::new("/the-root/test3.rs"),
5163 Default::default(),
5164 )
5165 .await
5166 .unwrap();
5167 assert_eq!(
5168 fake_rust_server
5169 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5170 .await
5171 .text_document,
5172 lsp::TextDocumentIdentifier::new(
5173 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5174 ),
5175 );
5176 assert_eq!(
5177 fake_rust_server
5178 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5179 .await
5180 .text_document,
5181 lsp::TextDocumentItem {
5182 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5183 version: 0,
5184 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5185 language_id: Default::default()
5186 },
5187 );
5188
5189 rust_buffer2.update(cx, |buffer, cx| {
5190 buffer.update_diagnostics(
5191 DiagnosticSet::from_sorted_entries(
5192 vec![DiagnosticEntry {
5193 diagnostic: Default::default(),
5194 range: Anchor::MIN..Anchor::MAX,
5195 }],
5196 &buffer.snapshot(),
5197 ),
5198 cx,
5199 );
5200 assert_eq!(
5201 buffer
5202 .snapshot()
5203 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5204 .count(),
5205 1
5206 );
5207 });
5208
5209 // When the rename changes the extension of the file, the buffer gets closed on the old
5210 // language server and gets opened on the new one.
5211 fs.rename(
5212 Path::new("/the-root/test3.rs"),
5213 Path::new("/the-root/test3.json"),
5214 Default::default(),
5215 )
5216 .await
5217 .unwrap();
5218 assert_eq!(
5219 fake_rust_server
5220 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5221 .await
5222 .text_document,
5223 lsp::TextDocumentIdentifier::new(
5224 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5225 ),
5226 );
5227 assert_eq!(
5228 fake_json_server
5229 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5230 .await
5231 .text_document,
5232 lsp::TextDocumentItem {
5233 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5234 version: 0,
5235 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5236 language_id: Default::default()
5237 },
5238 );
5239 // We clear the diagnostics, since the language has changed.
5240 rust_buffer2.read_with(cx, |buffer, _| {
5241 assert_eq!(
5242 buffer
5243 .snapshot()
5244 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5245 .count(),
5246 0
5247 );
5248 });
5249
5250 // The renamed file's version resets after changing language server.
5251 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5252 assert_eq!(
5253 fake_json_server
5254 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5255 .await
5256 .text_document,
5257 lsp::VersionedTextDocumentIdentifier::new(
5258 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5259 1
5260 )
5261 );
5262
5263 // Restart language servers
5264 project.update(cx, |project, cx| {
5265 project.restart_language_servers_for_buffers(
5266 vec![rust_buffer.clone(), json_buffer.clone()],
5267 cx,
5268 );
5269 });
5270
5271 let mut rust_shutdown_requests = fake_rust_server
5272 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5273 let mut json_shutdown_requests = fake_json_server
5274 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5275 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5276
5277 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5278 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5279
        // Ensure the Rust document is reopened in the new Rust language server.
5281 assert_eq!(
5282 fake_rust_server
5283 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5284 .await
5285 .text_document,
5286 lsp::TextDocumentItem {
5287 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5288 version: 1,
5289 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5290 language_id: Default::default()
5291 }
5292 );
5293
        // Ensure the JSON documents are reopened in the new JSON language server.
5295 assert_set_eq!(
5296 [
5297 fake_json_server
5298 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5299 .await
5300 .text_document,
5301 fake_json_server
5302 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5303 .await
5304 .text_document,
5305 ],
5306 [
5307 lsp::TextDocumentItem {
5308 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5309 version: 0,
5310 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5311 language_id: Default::default()
5312 },
5313 lsp::TextDocumentItem {
5314 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5315 version: 1,
5316 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5317 language_id: Default::default()
5318 }
5319 ]
5320 );
5321
5322 // Close notifications are reported only to servers matching the buffer's language.
5323 cx.update(|_| drop(json_buffer));
5324 let close_message = lsp::DidCloseTextDocumentParams {
5325 text_document: lsp::TextDocumentIdentifier::new(
5326 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5327 ),
5328 };
5329 assert_eq!(
5330 fake_json_server
5331 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5332 .await,
5333 close_message,
5334 );
5335 }
5336
5337 #[gpui::test]
5338 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5339 cx.foreground().forbid_parking();
5340 language::init_test(cx);
5341
5342 let fs = FakeFs::new(cx.background());
5343 fs.insert_tree(
5344 "/dir",
5345 json!({
5346 "a.rs": "let a = 1;",
5347 "b.rs": "let b = 2;"
5348 }),
5349 )
5350 .await;
5351
5352 let project = Project::test(fs, cx);
5353 let worktree_a_id = project
5354 .update(cx, |project, cx| {
5355 project.find_or_create_local_worktree("/dir/a.rs", true, cx)
5356 })
5357 .await
5358 .unwrap()
5359 .0
5360 .read_with(cx, |tree, _| tree.id());
5361 let worktree_b_id = project
5362 .update(cx, |project, cx| {
5363 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5364 })
5365 .await
5366 .unwrap()
5367 .0
5368 .read_with(cx, |tree, _| tree.id());
5369
5370 let buffer_a = project
5371 .update(cx, |project, cx| {
5372 project.open_buffer((worktree_a_id, ""), cx)
5373 })
5374 .await
5375 .unwrap();
5376 let buffer_b = project
5377 .update(cx, |project, cx| {
5378 project.open_buffer((worktree_b_id, ""), cx)
5379 })
5380 .await
5381 .unwrap();
5382
5383 project.update(cx, |project, cx| {
5384 project
5385 .update_diagnostics(
5386 lsp::PublishDiagnosticsParams {
5387 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5388 version: None,
5389 diagnostics: vec![lsp::Diagnostic {
5390 range: lsp::Range::new(
5391 lsp::Position::new(0, 4),
5392 lsp::Position::new(0, 5),
5393 ),
5394 severity: Some(lsp::DiagnosticSeverity::ERROR),
5395 message: "error 1".to_string(),
5396 ..Default::default()
5397 }],
5398 },
5399 &[],
5400 cx,
5401 )
5402 .unwrap();
5403 project
5404 .update_diagnostics(
5405 lsp::PublishDiagnosticsParams {
5406 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5407 version: None,
5408 diagnostics: vec![lsp::Diagnostic {
5409 range: lsp::Range::new(
5410 lsp::Position::new(0, 4),
5411 lsp::Position::new(0, 5),
5412 ),
5413 severity: Some(lsp::DiagnosticSeverity::WARNING),
5414 message: "error 2".to_string(),
5415 ..Default::default()
5416 }],
5417 },
5418 &[],
5419 cx,
5420 )
5421 .unwrap();
5422 });
5423
5424 buffer_a.read_with(cx, |buffer, _| {
5425 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5426 assert_eq!(
5427 chunks
5428 .iter()
5429 .map(|(s, d)| (s.as_str(), *d))
5430 .collect::<Vec<_>>(),
5431 &[
5432 ("let ", None),
5433 ("a", Some(DiagnosticSeverity::ERROR)),
5434 (" = 1;", None),
5435 ]
5436 );
5437 });
5438 buffer_b.read_with(cx, |buffer, _| {
5439 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5440 assert_eq!(
5441 chunks
5442 .iter()
5443 .map(|(s, d)| (s.as_str(), *d))
5444 .collect::<Vec<_>>(),
5445 &[
5446 ("let ", None),
5447 ("b", Some(DiagnosticSeverity::WARNING)),
5448 (" = 2;", None),
5449 ]
5450 );
5451 });
5452 }
5453
5454 #[gpui::test]
5455 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5456 cx.foreground().forbid_parking();
5457
5458 let progress_token = "the-progress-token";
5459 let mut language = Language::new(
5460 LanguageConfig {
5461 name: "Rust".into(),
5462 path_suffixes: vec!["rs".to_string()],
5463 ..Default::default()
5464 },
5465 Some(tree_sitter_rust::language()),
5466 );
5467 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5468 disk_based_diagnostics_progress_token: Some(progress_token),
5469 disk_based_diagnostics_sources: &["disk"],
5470 ..Default::default()
5471 });
5472
5473 let fs = FakeFs::new(cx.background());
5474 fs.insert_tree(
5475 "/dir",
5476 json!({
5477 "a.rs": "fn a() { A }",
5478 "b.rs": "const y: i32 = 1",
5479 }),
5480 )
5481 .await;
5482
5483 let project = Project::test(fs, cx);
5484 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5485
5486 let (tree, _) = project
5487 .update(cx, |project, cx| {
5488 project.find_or_create_local_worktree("/dir", true, cx)
5489 })
5490 .await
5491 .unwrap();
5492 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5493
5494 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5495 .await;
5496
        // Cause the worktree to start the fake language server.
5498 let _buffer = project
5499 .update(cx, |project, cx| {
5500 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5501 })
5502 .await
5503 .unwrap();
5504
5505 let mut events = subscribe(&project, cx);
5506
5507 let mut fake_server = fake_servers.next().await.unwrap();
5508 fake_server.start_progress(progress_token).await;
5509 assert_eq!(
5510 events.next().await.unwrap(),
5511 Event::DiskBasedDiagnosticsStarted
5512 );
5513
5514 fake_server.start_progress(progress_token).await;
5515 fake_server.end_progress(progress_token).await;
5516 fake_server.start_progress(progress_token).await;
5517
5518 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5519 lsp::PublishDiagnosticsParams {
5520 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5521 version: None,
5522 diagnostics: vec![lsp::Diagnostic {
5523 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5524 severity: Some(lsp::DiagnosticSeverity::ERROR),
5525 message: "undefined variable 'A'".to_string(),
5526 ..Default::default()
5527 }],
5528 },
5529 );
5530 assert_eq!(
5531 events.next().await.unwrap(),
5532 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5533 );
5534
5535 fake_server.end_progress(progress_token).await;
5536 fake_server.end_progress(progress_token).await;
5537 assert_eq!(
5538 events.next().await.unwrap(),
5539 Event::DiskBasedDiagnosticsUpdated
5540 );
5541 assert_eq!(
5542 events.next().await.unwrap(),
5543 Event::DiskBasedDiagnosticsFinished
5544 );
5545
5546 let buffer = project
5547 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5548 .await
5549 .unwrap();
5550
5551 buffer.read_with(cx, |buffer, _| {
5552 let snapshot = buffer.snapshot();
5553 let diagnostics = snapshot
5554 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5555 .collect::<Vec<_>>();
5556 assert_eq!(
5557 diagnostics,
5558 &[DiagnosticEntry {
5559 range: Point::new(0, 9)..Point::new(0, 10),
5560 diagnostic: Diagnostic {
5561 severity: lsp::DiagnosticSeverity::ERROR,
5562 message: "undefined variable 'A'".to_string(),
5563 group_id: 0,
5564 is_primary: true,
5565 ..Default::default()
5566 }
5567 }]
5568 )
5569 });
5570
5571 // Ensure publishing empty diagnostics twice only results in one update event.
5572 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5573 lsp::PublishDiagnosticsParams {
5574 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5575 version: None,
5576 diagnostics: Default::default(),
5577 },
5578 );
5579 assert_eq!(
5580 events.next().await.unwrap(),
5581 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5582 );
5583
5584 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5585 lsp::PublishDiagnosticsParams {
5586 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5587 version: None,
5588 diagnostics: Default::default(),
5589 },
5590 );
5591 cx.foreground().run_until_parked();
5592 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5593 }
5594
5595 #[gpui::test]
5596 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5597 cx.foreground().forbid_parking();
5598
5599 let progress_token = "the-progress-token";
5600 let mut language = Language::new(
5601 LanguageConfig {
5602 path_suffixes: vec!["rs".to_string()],
5603 ..Default::default()
5604 },
5605 None,
5606 );
5607 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5608 disk_based_diagnostics_sources: &["disk"],
5609 disk_based_diagnostics_progress_token: Some(progress_token),
5610 ..Default::default()
5611 });
5612
5613 let fs = FakeFs::new(cx.background());
5614 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5615
5616 let project = Project::test(fs, cx);
5617 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5618
5619 let worktree_id = project
5620 .update(cx, |project, cx| {
5621 project.find_or_create_local_worktree("/dir", true, cx)
5622 })
5623 .await
5624 .unwrap()
5625 .0
5626 .read_with(cx, |tree, _| tree.id());
5627
5628 let buffer = project
5629 .update(cx, |project, cx| {
5630 project.open_buffer((worktree_id, "a.rs"), cx)
5631 })
5632 .await
5633 .unwrap();
5634
5635 // Simulate diagnostics starting to update.
5636 let mut fake_server = fake_servers.next().await.unwrap();
5637 fake_server.start_progress(progress_token).await;
5638
5639 // Restart the server before the diagnostics finish updating.
5640 project.update(cx, |project, cx| {
5641 project.restart_language_servers_for_buffers([buffer], cx);
5642 });
5643 let mut events = subscribe(&project, cx);
5644
5645 // Simulate the newly started server sending more diagnostics.
5646 let mut fake_server = fake_servers.next().await.unwrap();
5647 fake_server.start_progress(progress_token).await;
5648 assert_eq!(
5649 events.next().await.unwrap(),
5650 Event::DiskBasedDiagnosticsStarted
5651 );
5652
5653 // All diagnostics are considered done, despite the old server's diagnostic
5654 // task never completing.
5655 fake_server.end_progress(progress_token).await;
5656 assert_eq!(
5657 events.next().await.unwrap(),
5658 Event::DiskBasedDiagnosticsUpdated
5659 );
5660 assert_eq!(
5661 events.next().await.unwrap(),
5662 Event::DiskBasedDiagnosticsFinished
5663 );
5664 project.read_with(cx, |project, _| {
5665 assert!(!project.is_running_disk_based_diagnostics());
5666 });
5667 }
5668
5669 #[gpui::test]
5670 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5671 cx.foreground().forbid_parking();
5672
5673 let mut language = Language::new(
5674 LanguageConfig {
5675 name: "Rust".into(),
5676 path_suffixes: vec!["rs".to_string()],
5677 ..Default::default()
5678 },
5679 Some(tree_sitter_rust::language()),
5680 );
5681 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5682 disk_based_diagnostics_sources: &["disk"],
5683 ..Default::default()
5684 });
5685
5686 let text = "
5687 fn a() { A }
5688 fn b() { BB }
5689 fn c() { CCC }
5690 "
5691 .unindent();
5692
5693 let fs = FakeFs::new(cx.background());
5694 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5695
5696 let project = Project::test(fs, cx);
5697 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5698
5699 let worktree_id = project
5700 .update(cx, |project, cx| {
5701 project.find_or_create_local_worktree("/dir", true, cx)
5702 })
5703 .await
5704 .unwrap()
5705 .0
5706 .read_with(cx, |tree, _| tree.id());
5707
5708 let buffer = project
5709 .update(cx, |project, cx| {
5710 project.open_buffer((worktree_id, "a.rs"), cx)
5711 })
5712 .await
5713 .unwrap();
5714
5715 let mut fake_server = fake_servers.next().await.unwrap();
5716 let open_notification = fake_server
5717 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5718 .await;
5719
5720 // Edit the buffer, moving the content down
5721 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5722 let change_notification_1 = fake_server
5723 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5724 .await;
5725 assert!(
5726 change_notification_1.text_document.version > open_notification.text_document.version
5727 );
5728
5729 // Report some diagnostics for the initial version of the buffer
5730 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5731 lsp::PublishDiagnosticsParams {
5732 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5733 version: Some(open_notification.text_document.version),
5734 diagnostics: vec![
5735 lsp::Diagnostic {
5736 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5737 severity: Some(DiagnosticSeverity::ERROR),
5738 message: "undefined variable 'A'".to_string(),
5739 source: Some("disk".to_string()),
5740 ..Default::default()
5741 },
5742 lsp::Diagnostic {
5743 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5744 severity: Some(DiagnosticSeverity::ERROR),
5745 message: "undefined variable 'BB'".to_string(),
5746 source: Some("disk".to_string()),
5747 ..Default::default()
5748 },
5749 lsp::Diagnostic {
5750 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5751 severity: Some(DiagnosticSeverity::ERROR),
5752 source: Some("disk".to_string()),
5753 message: "undefined variable 'CCC'".to_string(),
5754 ..Default::default()
5755 },
5756 ],
5757 },
5758 );
5759
5760 // The diagnostics have moved down since they were created.
5761 buffer.next_notification(cx).await;
5762 buffer.read_with(cx, |buffer, _| {
5763 assert_eq!(
5764 buffer
5765 .snapshot()
5766 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5767 .collect::<Vec<_>>(),
5768 &[
5769 DiagnosticEntry {
5770 range: Point::new(3, 9)..Point::new(3, 11),
5771 diagnostic: Diagnostic {
5772 severity: DiagnosticSeverity::ERROR,
5773 message: "undefined variable 'BB'".to_string(),
5774 is_disk_based: true,
5775 group_id: 1,
5776 is_primary: true,
5777 ..Default::default()
5778 },
5779 },
5780 DiagnosticEntry {
5781 range: Point::new(4, 9)..Point::new(4, 12),
5782 diagnostic: Diagnostic {
5783 severity: DiagnosticSeverity::ERROR,
5784 message: "undefined variable 'CCC'".to_string(),
5785 is_disk_based: true,
5786 group_id: 2,
5787 is_primary: true,
5788 ..Default::default()
5789 }
5790 }
5791 ]
5792 );
5793 assert_eq!(
5794 chunks_with_diagnostics(buffer, 0..buffer.len()),
5795 [
5796 ("\n\nfn a() { ".to_string(), None),
5797 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5798 (" }\nfn b() { ".to_string(), None),
5799 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5800 (" }\nfn c() { ".to_string(), None),
5801 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5802 (" }\n".to_string(), None),
5803 ]
5804 );
5805 assert_eq!(
5806 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5807 [
5808 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5809 (" }\nfn c() { ".to_string(), None),
5810 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5811 ]
5812 );
5813 });
5814
5815 // Ensure overlapping diagnostics are highlighted correctly.
5816 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5817 lsp::PublishDiagnosticsParams {
5818 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5819 version: Some(open_notification.text_document.version),
5820 diagnostics: vec![
5821 lsp::Diagnostic {
5822 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5823 severity: Some(DiagnosticSeverity::ERROR),
5824 message: "undefined variable 'A'".to_string(),
5825 source: Some("disk".to_string()),
5826 ..Default::default()
5827 },
5828 lsp::Diagnostic {
5829 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5830 severity: Some(DiagnosticSeverity::WARNING),
5831 message: "unreachable statement".to_string(),
5832 source: Some("disk".to_string()),
5833 ..Default::default()
5834 },
5835 ],
5836 },
5837 );
5838
5839 buffer.next_notification(cx).await;
5840 buffer.read_with(cx, |buffer, _| {
5841 assert_eq!(
5842 buffer
5843 .snapshot()
5844 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5845 .collect::<Vec<_>>(),
5846 &[
5847 DiagnosticEntry {
5848 range: Point::new(2, 9)..Point::new(2, 12),
5849 diagnostic: Diagnostic {
5850 severity: DiagnosticSeverity::WARNING,
5851 message: "unreachable statement".to_string(),
5852 is_disk_based: true,
5853 group_id: 1,
5854 is_primary: true,
5855 ..Default::default()
5856 }
5857 },
5858 DiagnosticEntry {
5859 range: Point::new(2, 9)..Point::new(2, 10),
5860 diagnostic: Diagnostic {
5861 severity: DiagnosticSeverity::ERROR,
5862 message: "undefined variable 'A'".to_string(),
5863 is_disk_based: true,
5864 group_id: 0,
5865 is_primary: true,
5866 ..Default::default()
5867 },
5868 }
5869 ]
5870 );
5871 assert_eq!(
5872 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5873 [
5874 ("fn a() { ".to_string(), None),
5875 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5876 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5877 ("\n".to_string(), None),
5878 ]
5879 );
5880 assert_eq!(
5881 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5882 [
5883 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5884 ("\n".to_string(), None),
5885 ]
5886 );
5887 });
5888
5889 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5890 // changes since the last save.
5891 buffer.update(cx, |buffer, cx| {
5892 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5893 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5894 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5895 });
5896 let change_notification_2 = fake_server
5897 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5898 .await;
5899 assert!(
5900 change_notification_2.text_document.version
5901 > change_notification_1.text_document.version
5902 );
5903
5904 // Handle out-of-order diagnostics
5905 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5906 lsp::PublishDiagnosticsParams {
5907 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5908 version: Some(change_notification_2.text_document.version),
5909 diagnostics: vec![
5910 lsp::Diagnostic {
5911 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5912 severity: Some(DiagnosticSeverity::ERROR),
5913 message: "undefined variable 'BB'".to_string(),
5914 source: Some("disk".to_string()),
5915 ..Default::default()
5916 },
5917 lsp::Diagnostic {
5918 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5919 severity: Some(DiagnosticSeverity::WARNING),
5920 message: "undefined variable 'A'".to_string(),
5921 source: Some("disk".to_string()),
5922 ..Default::default()
5923 },
5924 ],
5925 },
5926 );
5927
5928 buffer.next_notification(cx).await;
5929 buffer.read_with(cx, |buffer, _| {
5930 assert_eq!(
5931 buffer
5932 .snapshot()
5933 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5934 .collect::<Vec<_>>(),
5935 &[
5936 DiagnosticEntry {
5937 range: Point::new(2, 21)..Point::new(2, 22),
5938 diagnostic: Diagnostic {
5939 severity: DiagnosticSeverity::WARNING,
5940 message: "undefined variable 'A'".to_string(),
5941 is_disk_based: true,
5942 group_id: 1,
5943 is_primary: true,
5944 ..Default::default()
5945 }
5946 },
5947 DiagnosticEntry {
5948 range: Point::new(3, 9)..Point::new(3, 14),
5949 diagnostic: Diagnostic {
5950 severity: DiagnosticSeverity::ERROR,
5951 message: "undefined variable 'BB'".to_string(),
5952 is_disk_based: true,
5953 group_id: 0,
5954 is_primary: true,
5955 ..Default::default()
5956 },
5957 }
5958 ]
5959 );
5960 });
5961 }
5962
5963 #[gpui::test]
5964 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5965 cx.foreground().forbid_parking();
5966
5967 let text = concat!(
5968 "let one = ;\n", //
5969 "let two = \n",
5970 "let three = 3;\n",
5971 );
5972
5973 let fs = FakeFs::new(cx.background());
5974 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5975
5976 let project = Project::test(fs, cx);
5977 let worktree_id = project
5978 .update(cx, |project, cx| {
5979 project.find_or_create_local_worktree("/dir", true, cx)
5980 })
5981 .await
5982 .unwrap()
5983 .0
5984 .read_with(cx, |tree, _| tree.id());
5985
5986 let buffer = project
5987 .update(cx, |project, cx| {
5988 project.open_buffer((worktree_id, "a.rs"), cx)
5989 })
5990 .await
5991 .unwrap();
5992
5993 project.update(cx, |project, cx| {
5994 project
5995 .update_buffer_diagnostics(
5996 &buffer,
5997 vec![
5998 DiagnosticEntry {
5999 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6000 diagnostic: Diagnostic {
6001 severity: DiagnosticSeverity::ERROR,
6002 message: "syntax error 1".to_string(),
6003 ..Default::default()
6004 },
6005 },
6006 DiagnosticEntry {
6007 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6008 diagnostic: Diagnostic {
6009 severity: DiagnosticSeverity::ERROR,
6010 message: "syntax error 2".to_string(),
6011 ..Default::default()
6012 },
6013 },
6014 ],
6015 None,
6016 cx,
6017 )
6018 .unwrap();
6019 });
6020
6021 // An empty range is extended forward to include the following character.
6022 // At the end of a line, an empty range is extended backward to include
6023 // the preceding character.
6024 buffer.read_with(cx, |buffer, _| {
6025 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6026 assert_eq!(
6027 chunks
6028 .iter()
6029 .map(|(s, d)| (s.as_str(), *d))
6030 .collect::<Vec<_>>(),
6031 &[
6032 ("let one = ", None),
6033 (";", Some(DiagnosticSeverity::ERROR)),
6034 ("\nlet two =", None),
6035 (" ", Some(DiagnosticSeverity::ERROR)),
6036 ("\nlet three = 3;\n", None)
6037 ]
6038 );
6039 });
6040 }
6041
6042 #[gpui::test]
6043 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6044 cx.foreground().forbid_parking();
6045
6046 let mut language = Language::new(
6047 LanguageConfig {
6048 name: "Rust".into(),
6049 path_suffixes: vec!["rs".to_string()],
6050 ..Default::default()
6051 },
6052 Some(tree_sitter_rust::language()),
6053 );
6054 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6055
6056 let text = "
6057 fn a() {
6058 f1();
6059 }
6060 fn b() {
6061 f2();
6062 }
6063 fn c() {
6064 f3();
6065 }
6066 "
6067 .unindent();
6068
6069 let fs = FakeFs::new(cx.background());
6070 fs.insert_tree(
6071 "/dir",
6072 json!({
6073 "a.rs": text.clone(),
6074 }),
6075 )
6076 .await;
6077
6078 let project = Project::test(fs, cx);
6079 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6080
6081 let worktree_id = project
6082 .update(cx, |project, cx| {
6083 project.find_or_create_local_worktree("/dir", true, cx)
6084 })
6085 .await
6086 .unwrap()
6087 .0
6088 .read_with(cx, |tree, _| tree.id());
6089
6090 let buffer = project
6091 .update(cx, |project, cx| {
6092 project.open_buffer((worktree_id, "a.rs"), cx)
6093 })
6094 .await
6095 .unwrap();
6096
6097 let mut fake_server = fake_servers.next().await.unwrap();
6098 let lsp_document_version = fake_server
6099 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6100 .await
6101 .text_document
6102 .version;
6103
6104 // Simulate editing the buffer after the language server computes some edits.
6105 buffer.update(cx, |buffer, cx| {
6106 buffer.edit(
6107 [Point::new(0, 0)..Point::new(0, 0)],
6108 "// above first function\n",
6109 cx,
6110 );
6111 buffer.edit(
6112 [Point::new(2, 0)..Point::new(2, 0)],
6113 " // inside first function\n",
6114 cx,
6115 );
6116 buffer.edit(
6117 [Point::new(6, 4)..Point::new(6, 4)],
6118 "// inside second function ",
6119 cx,
6120 );
6121
6122 assert_eq!(
6123 buffer.text(),
6124 "
6125 // above first function
6126 fn a() {
6127 // inside first function
6128 f1();
6129 }
6130 fn b() {
6131 // inside second function f2();
6132 }
6133 fn c() {
6134 f3();
6135 }
6136 "
6137 .unindent()
6138 );
6139 });
6140
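        // Convert LSP edits that were computed against the older document version into
        // buffer edits: they are interpreted relative to that version and then mapped
        // through the buffer edits made above.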
6141 let edits = project
6142 .update(cx, |project, cx| {
6143 project.edits_from_lsp(
6144 &buffer,
6145 vec![
6146 // replace body of first function
6147 lsp::TextEdit {
6148 range: lsp::Range::new(
6149 lsp::Position::new(0, 0),
6150 lsp::Position::new(3, 0),
6151 ),
6152 new_text: "
6153 fn a() {
6154 f10();
6155 }
6156 "
6157 .unindent(),
6158 },
6159 // edit inside second function
6160 lsp::TextEdit {
6161 range: lsp::Range::new(
6162 lsp::Position::new(4, 6),
6163 lsp::Position::new(4, 6),
6164 ),
6165 new_text: "00".into(),
6166 },
6167 // edit inside third function via two distinct edits
6168 lsp::TextEdit {
6169 range: lsp::Range::new(
6170 lsp::Position::new(7, 5),
6171 lsp::Position::new(7, 5),
6172 ),
6173 new_text: "4000".into(),
6174 },
6175 lsp::TextEdit {
6176 range: lsp::Range::new(
6177 lsp::Position::new(7, 5),
6178 lsp::Position::new(7, 6),
6179 ),
6180 new_text: "".into(),
6181 },
6182 ],
6183 Some(lsp_document_version),
6184 cx,
6185 )
6186 })
6187 .await
6188 .unwrap();
6189
6190 buffer.update(cx, |buffer, cx| {
6191 for (range, new_text) in edits {
6192 buffer.edit([range], new_text, cx);
6193 }
6194 assert_eq!(
6195 buffer.text(),
6196 "
6197 // above first function
6198 fn a() {
6199 // inside first function
6200 f10();
6201 }
6202 fn b() {
6203 // inside second function f200();
6204 }
6205 fn c() {
6206 f4000();
6207 }
6208 "
6209 .unindent()
6210 );
6211 });
6212 }
6213
6214 #[gpui::test]
6215 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6216 cx.foreground().forbid_parking();
6217
6218 let text = "
6219 use a::b;
6220 use a::c;
6221
6222 fn f() {
6223 b();
6224 c();
6225 }
6226 "
6227 .unindent();
6228
6229 let fs = FakeFs::new(cx.background());
6230 fs.insert_tree(
6231 "/dir",
6232 json!({
6233 "a.rs": text.clone(),
6234 }),
6235 )
6236 .await;
6237
6238 let project = Project::test(fs, cx);
6239 let worktree_id = project
6240 .update(cx, |project, cx| {
6241 project.find_or_create_local_worktree("/dir", true, cx)
6242 })
6243 .await
6244 .unwrap()
6245 .0
6246 .read_with(cx, |tree, _| tree.id());
6247
6248 let buffer = project
6249 .update(cx, |project, cx| {
6250 project.open_buffer((worktree_id, "a.rs"), cx)
6251 })
6252 .await
6253 .unwrap();
6254
6255 // Simulate the language server sending us a small edit in the form of a very large diff.
6256 // Rust-analyzer does this when performing a merge-imports code action.
6257 let edits = project
6258 .update(cx, |project, cx| {
6259 project.edits_from_lsp(
6260 &buffer,
6261 [
6262 // Replace the first use statement without editing the semicolon.
6263 lsp::TextEdit {
6264 range: lsp::Range::new(
6265 lsp::Position::new(0, 4),
6266 lsp::Position::new(0, 8),
6267 ),
6268 new_text: "a::{b, c}".into(),
6269 },
6270 // Reinsert the remainder of the file between the semicolon and the final
6271 // newline of the file.
6272 lsp::TextEdit {
6273 range: lsp::Range::new(
6274 lsp::Position::new(0, 9),
6275 lsp::Position::new(0, 9),
6276 ),
6277 new_text: "\n\n".into(),
6278 },
6279 lsp::TextEdit {
6280 range: lsp::Range::new(
6281 lsp::Position::new(0, 9),
6282 lsp::Position::new(0, 9),
6283 ),
6284 new_text: "
6285 fn f() {
6286 b();
6287 c();
6288 }"
6289 .unindent(),
6290 },
6291 // Delete everything after the first newline of the file.
6292 lsp::TextEdit {
6293 range: lsp::Range::new(
6294 lsp::Position::new(1, 0),
6295 lsp::Position::new(7, 0),
6296 ),
6297 new_text: "".into(),
6298 },
6299 ],
6300 None,
6301 cx,
6302 )
6303 })
6304 .await
6305 .unwrap();
6306
6307 buffer.update(cx, |buffer, cx| {
6308 let edits = edits
6309 .into_iter()
6310 .map(|(range, text)| {
6311 (
6312 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6313 text,
6314 )
6315 })
6316 .collect::<Vec<_>>();
6317
6318 assert_eq!(
6319 edits,
6320 [
6321 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6322 (Point::new(1, 0)..Point::new(2, 0), "".into())
6323 ]
6324 );
6325
6326 for (range, new_text) in edits {
6327 buffer.edit([range], new_text, cx);
6328 }
6329 assert_eq!(
6330 buffer.text(),
6331 "
6332 use a::{b, c};
6333
6334 fn f() {
6335 b();
6336 c();
6337 }
6338 "
6339 .unindent()
6340 );
6341 });
6342 }
6343
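    // Test helper: walk the buffer's chunks over `range` and coalesce adjacent chunks
    // that share the same diagnostic severity, yielding (text, severity) pairs.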
6344 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6345 buffer: &Buffer,
6346 range: Range<T>,
6347 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6348 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6349 for chunk in buffer.snapshot().chunks(range, true) {
6350 if chunks.last().map_or(false, |prev_chunk| {
6351 prev_chunk.1 == chunk.diagnostic_severity
6352 }) {
6353 chunks.last_mut().unwrap().0.push_str(chunk.text);
6354 } else {
6355 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6356 }
6357 }
6358 chunks
6359 }
6360
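    // Path matching should return no file matches in a worktree that contains only
    // directories.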
6361 #[gpui::test]
6362 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6363 let dir = temp_tree(json!({
6364 "root": {
6365 "dir1": {},
6366 "dir2": {
6367 "dir3": {}
6368 }
6369 }
6370 }));
6371
6372 let project = Project::test(Arc::new(RealFs), cx);
6373 let (tree, _) = project
6374 .update(cx, |project, cx| {
6375 project.find_or_create_local_worktree(&dir.path(), true, cx)
6376 })
6377 .await
6378 .unwrap();
6379
6380 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6381 .await;
6382
6383 let cancel_flag = Default::default();
6384 let results = project
6385 .read_with(cx, |project, cx| {
6386 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6387 })
6388 .await;
6389
6390 assert!(results.is_empty());
6391 }
6392
6393 #[gpui::test]
6394 async fn test_definition(cx: &mut gpui::TestAppContext) {
6395 let mut language = Language::new(
6396 LanguageConfig {
6397 name: "Rust".into(),
6398 path_suffixes: vec!["rs".to_string()],
6399 ..Default::default()
6400 },
6401 Some(tree_sitter_rust::language()),
6402 );
6403 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6404
6405 let fs = FakeFs::new(cx.background());
6406 fs.insert_tree(
6407 "/dir",
6408 json!({
6409 "a.rs": "const fn a() { A }",
6410 "b.rs": "const y: i32 = crate::a()",
6411 }),
6412 )
6413 .await;
6414
6415 let project = Project::test(fs, cx);
6416 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6417
6418 let (tree, _) = project
6419 .update(cx, |project, cx| {
6420 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6421 })
6422 .await
6423 .unwrap();
6424 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6425 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6426 .await;
6427
6428 let buffer = project
6429 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6430 .await
6431 .unwrap();
6432
6433 let fake_server = fake_servers.next().await.unwrap();
6434 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6435 let params = params.text_document_position_params;
6436 assert_eq!(
6437 params.text_document.uri.to_file_path().unwrap(),
6438 Path::new("/dir/b.rs"),
6439 );
6440 assert_eq!(params.position, lsp::Position::new(0, 22));
6441
6442 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6443 lsp::Location::new(
6444 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6445 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6446 ),
6447 )))
6448 });
6449
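        // The definition resolves to a location in `a.rs`, which is opened in a new,
        // non-visible worktree alongside the visible single-file worktree for `b.rs`.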
6450 let mut definitions = project
6451 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6452 .await
6453 .unwrap();
6454
6455 assert_eq!(definitions.len(), 1);
6456 let definition = definitions.pop().unwrap();
6457 cx.update(|cx| {
6458 let target_buffer = definition.buffer.read(cx);
6459 assert_eq!(
6460 target_buffer
6461 .file()
6462 .unwrap()
6463 .as_local()
6464 .unwrap()
6465 .abs_path(cx),
6466 Path::new("/dir/a.rs"),
6467 );
6468 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6469 assert_eq!(
6470 list_worktrees(&project, cx),
6471 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6472 );
6473
6474 drop(definition);
6475 });
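        // Once the definition (and with it the handle to the `a.rs` buffer) is dropped,
        // the non-visible worktree is released.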
6476 cx.read(|cx| {
6477 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6478 });
6479
6480 fn list_worktrees<'a>(
6481 project: &'a ModelHandle<Project>,
6482 cx: &'a AppContext,
6483 ) -> Vec<(&'a Path, bool)> {
6484 project
6485 .read(cx)
6486 .worktrees(cx)
6487 .map(|worktree| {
6488 let worktree = worktree.read(cx);
6489 (
6490 worktree.as_local().unwrap().abs_path().as_ref(),
6491 worktree.is_visible(),
6492 )
6493 })
6494 .collect::<Vec<_>>()
6495 }
6496 }
6497
6498 #[gpui::test(iterations = 10)]
6499 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6500 let mut language = Language::new(
6501 LanguageConfig {
6502 name: "TypeScript".into(),
6503 path_suffixes: vec!["ts".to_string()],
6504 ..Default::default()
6505 },
6506 None,
6507 );
6508 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6509
6510 let fs = FakeFs::new(cx.background());
6511 fs.insert_tree(
6512 "/dir",
6513 json!({
6514 "a.ts": "a",
6515 }),
6516 )
6517 .await;
6518
6519 let project = Project::test(fs, cx);
6520 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6521
6522 let (tree, _) = project
6523 .update(cx, |project, cx| {
6524 project.find_or_create_local_worktree("/dir", true, cx)
6525 })
6526 .await
6527 .unwrap();
6528 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6529 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6530 .await;
6531
6532 let buffer = project
6533 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6534 .await
6535 .unwrap();
6536
6537 let fake_server = fake_language_servers.next().await.unwrap();
6538
        // The language server returns code actions that contain commands but no edits.
6540 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6541 fake_server
6542 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6543 Ok(Some(vec![
6544 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6545 title: "The code action".into(),
6546 command: Some(lsp::Command {
6547 title: "The command".into(),
6548 command: "_the/command".into(),
6549 arguments: Some(vec![json!("the-argument")]),
6550 }),
6551 ..Default::default()
6552 }),
6553 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6554 title: "two".into(),
6555 ..Default::default()
6556 }),
6557 ]))
6558 })
6559 .next()
6560 .await;
6561
6562 let action = actions.await.unwrap()[0].clone();
6563 let apply = project.update(cx, |project, cx| {
6564 project.apply_code_action(buffer.clone(), action, true, cx)
6565 });
6566
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
6569 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6570 |action, _| async move { Ok(action) },
6571 );
6572
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6575 fake_server
6576 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6577 let fake = fake_server.clone();
6578 move |params, _| {
6579 assert_eq!(params.command, "_the/command");
6580 let fake = fake.clone();
6581 async move {
6582 fake.server
6583 .request::<lsp::request::ApplyWorkspaceEdit>(
6584 lsp::ApplyWorkspaceEditParams {
6585 label: None,
6586 edit: lsp::WorkspaceEdit {
6587 changes: Some(
6588 [(
6589 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6590 vec![lsp::TextEdit {
6591 range: lsp::Range::new(
6592 lsp::Position::new(0, 0),
6593 lsp::Position::new(0, 0),
6594 ),
6595 new_text: "X".into(),
6596 }],
6597 )]
6598 .into_iter()
6599 .collect(),
6600 ),
6601 ..Default::default()
6602 },
6603 },
6604 )
6605 .await
6606 .unwrap();
6607 Ok(Some(json!(null)))
6608 }
6609 }
6610 })
6611 .next()
6612 .await;
6613
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6616 let transaction = apply.await.unwrap();
6617 assert!(transaction.0.contains_key(&buffer));
6618 buffer.update(cx, |buffer, cx| {
6619 assert_eq!(buffer.text(), "Xa");
6620 buffer.undo(cx);
6621 assert_eq!(buffer.text(), "a");
6622 });
6623 }
6624
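    // Saving a dirty buffer writes its contents back to the underlying file.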
6625 #[gpui::test]
6626 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6627 let fs = FakeFs::new(cx.background());
6628 fs.insert_tree(
6629 "/dir",
6630 json!({
6631 "file1": "the old contents",
6632 }),
6633 )
6634 .await;
6635
6636 let project = Project::test(fs.clone(), cx);
6637 let worktree_id = project
6638 .update(cx, |p, cx| {
6639 p.find_or_create_local_worktree("/dir", true, cx)
6640 })
6641 .await
6642 .unwrap()
6643 .0
6644 .read_with(cx, |tree, _| tree.id());
6645
6646 let buffer = project
6647 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6648 .await
6649 .unwrap();
6650 buffer
6651 .update(cx, |buffer, cx| {
6652 assert_eq!(buffer.text(), "the old contents");
6653 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6654 buffer.save(cx)
6655 })
6656 .await
6657 .unwrap();
6658
6659 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6660 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6661 }
6662
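    // Same as above, but for a worktree rooted at a single file: the buffer is opened
    // with an empty relative path and saves back to that file.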
6663 #[gpui::test]
6664 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6665 let fs = FakeFs::new(cx.background());
6666 fs.insert_tree(
6667 "/dir",
6668 json!({
6669 "file1": "the old contents",
6670 }),
6671 )
6672 .await;
6673
6674 let project = Project::test(fs.clone(), cx);
6675 let worktree_id = project
6676 .update(cx, |p, cx| {
6677 p.find_or_create_local_worktree("/dir/file1", true, cx)
6678 })
6679 .await
6680 .unwrap()
6681 .0
6682 .read_with(cx, |tree, _| tree.id());
6683
6684 let buffer = project
6685 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6686 .await
6687 .unwrap();
6688 buffer
6689 .update(cx, |buffer, cx| {
6690 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6691 buffer.save(cx)
6692 })
6693 .await
6694 .unwrap();
6695
6696 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6697 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6698 }
6699
6700 #[gpui::test]
6701 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6702 let fs = FakeFs::new(cx.background());
6703 fs.insert_tree("/dir", json!({})).await;
6704
6705 let project = Project::test(fs.clone(), cx);
6706 let (worktree, _) = project
6707 .update(cx, |project, cx| {
6708 project.find_or_create_local_worktree("/dir", true, cx)
6709 })
6710 .await
6711 .unwrap();
6712 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6713
6714 let buffer = project.update(cx, |project, cx| {
6715 project.create_buffer("", None, cx).unwrap()
6716 });
6717 buffer.update(cx, |buffer, cx| {
6718 buffer.edit([0..0], "abc", cx);
6719 assert!(buffer.is_dirty());
6720 assert!(!buffer.has_conflict());
6721 });
6722 project
6723 .update(cx, |project, cx| {
6724 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6725 })
6726 .await
6727 .unwrap();
6728 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6729 buffer.read_with(cx, |buffer, cx| {
6730 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6731 assert!(!buffer.is_dirty());
6732 assert!(!buffer.has_conflict());
6733 });
6734
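        // Re-opening the saved path should yield the same buffer, not a new one.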
6735 let opened_buffer = project
6736 .update(cx, |project, cx| {
6737 project.open_buffer((worktree_id, "file1"), cx)
6738 })
6739 .await
6740 .unwrap();
6741 assert_eq!(opened_buffer, buffer);
6742 }
6743
6744 #[gpui::test(retries = 5)]
6745 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6746 let dir = temp_tree(json!({
6747 "a": {
6748 "file1": "",
6749 "file2": "",
6750 "file3": "",
6751 },
6752 "b": {
6753 "c": {
6754 "file4": "",
6755 "file5": "",
6756 }
6757 }
6758 }));
6759
6760 let project = Project::test(Arc::new(RealFs), cx);
6761 let rpc = project.read_with(cx, |p, _| p.client.clone());
6762
6763 let (tree, _) = project
6764 .update(cx, |p, cx| {
6765 p.find_or_create_local_worktree(dir.path(), true, cx)
6766 })
6767 .await
6768 .unwrap();
6769 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6770
6771 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6772 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6773 async move { buffer.await.unwrap() }
6774 };
6775 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6776 tree.read_with(cx, |tree, _| {
6777 tree.entry_for_path(path)
6778 .expect(&format!("no entry for path {}", path))
6779 .id
6780 })
6781 };
6782
6783 let buffer2 = buffer_for_path("a/file2", cx).await;
6784 let buffer3 = buffer_for_path("a/file3", cx).await;
6785 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6786 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6787
6788 let file2_id = id_for_path("a/file2", &cx);
6789 let file3_id = id_for_path("a/file3", &cx);
6790 let file4_id = id_for_path("b/c/file4", &cx);
6791
6792 // Wait for the initial scan.
6793 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6794 .await;
6795
6796 // Create a remote copy of this worktree.
6797 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6798 let (remote, load_task) = cx.update(|cx| {
6799 Worktree::remote(
6800 1,
6801 1,
6802 initial_snapshot.to_proto(&Default::default(), true),
6803 rpc.clone(),
6804 cx,
6805 )
6806 });
6807 load_task.await;
6808
6809 cx.read(|cx| {
6810 assert!(!buffer2.read(cx).is_dirty());
6811 assert!(!buffer3.read(cx).is_dirty());
6812 assert!(!buffer4.read(cx).is_dirty());
6813 assert!(!buffer5.read(cx).is_dirty());
6814 });
6815
6816 // Rename and delete files and directories.
6817 tree.flush_fs_events(&cx).await;
6818 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6819 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6820 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6821 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6822 tree.flush_fs_events(&cx).await;
6823
6824 let expected_paths = vec![
6825 "a",
6826 "a/file1",
6827 "a/file2.new",
6828 "b",
6829 "d",
6830 "d/file3",
6831 "d/file4",
6832 ];
6833
6834 cx.read(|app| {
6835 assert_eq!(
6836 tree.read(app)
6837 .paths()
6838 .map(|p| p.to_str().unwrap())
6839 .collect::<Vec<_>>(),
6840 expected_paths
6841 );
6842
6843 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6844 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6845 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6846
6847 assert_eq!(
6848 buffer2.read(app).file().unwrap().path().as_ref(),
6849 Path::new("a/file2.new")
6850 );
6851 assert_eq!(
6852 buffer3.read(app).file().unwrap().path().as_ref(),
6853 Path::new("d/file3")
6854 );
6855 assert_eq!(
6856 buffer4.read(app).file().unwrap().path().as_ref(),
6857 Path::new("d/file4")
6858 );
6859 assert_eq!(
6860 buffer5.read(app).file().unwrap().path().as_ref(),
6861 Path::new("b/c/file5")
6862 );
6863
6864 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6865 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6866 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6867 assert!(buffer5.read(app).file().unwrap().is_deleted());
6868 });
6869
6870 // Update the remote worktree. Check that it becomes consistent with the
6871 // local worktree.
6872 remote.update(cx, |remote, cx| {
6873 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6874 &initial_snapshot,
6875 1,
6876 1,
6877 true,
6878 );
6879 remote
6880 .as_remote_mut()
6881 .unwrap()
6882 .snapshot
6883 .apply_remote_update(update_message)
6884 .unwrap();
6885
6886 assert_eq!(
6887 remote
6888 .paths()
6889 .map(|p| p.to_str().unwrap())
6890 .collect::<Vec<_>>(),
6891 expected_paths
6892 );
6893 });
6894 }
6895
6896 #[gpui::test]
6897 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6898 let fs = FakeFs::new(cx.background());
6899 fs.insert_tree(
6900 "/the-dir",
6901 json!({
6902 "a.txt": "a-contents",
6903 "b.txt": "b-contents",
6904 }),
6905 )
6906 .await;
6907
6908 let project = Project::test(fs.clone(), cx);
6909 let worktree_id = project
6910 .update(cx, |p, cx| {
6911 p.find_or_create_local_worktree("/the-dir", true, cx)
6912 })
6913 .await
6914 .unwrap()
6915 .0
6916 .read_with(cx, |tree, _| tree.id());
6917
6918 // Spawn multiple tasks to open paths, repeating some paths.
6919 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6920 (
6921 p.open_buffer((worktree_id, "a.txt"), cx),
6922 p.open_buffer((worktree_id, "b.txt"), cx),
6923 p.open_buffer((worktree_id, "a.txt"), cx),
6924 )
6925 });
6926
6927 let buffer_a_1 = buffer_a_1.await.unwrap();
6928 let buffer_a_2 = buffer_a_2.await.unwrap();
6929 let buffer_b = buffer_b.await.unwrap();
6930 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6931 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6932
6933 // There is only one buffer per path.
6934 let buffer_a_id = buffer_a_1.id();
6935 assert_eq!(buffer_a_2.id(), buffer_a_id);
6936
6937 // Open the same path again while it is still open.
6938 drop(buffer_a_1);
6939 let buffer_a_3 = project
6940 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6941 .await
6942 .unwrap();
6943
6944 // There's still only one buffer per path.
6945 assert_eq!(buffer_a_3.id(), buffer_a_id);
6946 }
6947
6948 #[gpui::test]
6949 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6950 use std::fs;
6951
6952 let dir = temp_tree(json!({
6953 "file1": "abc",
6954 "file2": "def",
6955 "file3": "ghi",
6956 }));
6957
6958 let project = Project::test(Arc::new(RealFs), cx);
6959 let (worktree, _) = project
6960 .update(cx, |p, cx| {
6961 p.find_or_create_local_worktree(dir.path(), true, cx)
6962 })
6963 .await
6964 .unwrap();
6965 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6966
6967 worktree.flush_fs_events(&cx).await;
6968 worktree
6969 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6970 .await;
6971
6972 let buffer1 = project
6973 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6974 .await
6975 .unwrap();
6976 let events = Rc::new(RefCell::new(Vec::new()));
6977
        // Initially, the buffer isn't dirty.
6979 buffer1.update(cx, |buffer, cx| {
6980 cx.subscribe(&buffer1, {
6981 let events = events.clone();
6982 move |_, _, event, _| match event {
6983 BufferEvent::Operation(_) => {}
6984 _ => events.borrow_mut().push(event.clone()),
6985 }
6986 })
6987 .detach();
6988
6989 assert!(!buffer.is_dirty());
6990 assert!(events.borrow().is_empty());
6991
6992 buffer.edit(vec![1..2], "", cx);
6993 });
6994
        // After the first edit, the buffer is dirty and emits a dirtied event.
6996 buffer1.update(cx, |buffer, cx| {
6997 assert!(buffer.text() == "ac");
6998 assert!(buffer.is_dirty());
6999 assert_eq!(
7000 *events.borrow(),
7001 &[language::Event::Edited, language::Event::Dirtied]
7002 );
7003 events.borrow_mut().clear();
7004 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7005 });
7006
        // After saving, the buffer is not dirty and emits a saved event.
7008 buffer1.update(cx, |buffer, cx| {
7009 assert!(!buffer.is_dirty());
7010 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7011 events.borrow_mut().clear();
7012
7013 buffer.edit(vec![1..1], "B", cx);
7014 buffer.edit(vec![2..2], "D", cx);
7015 });
7016
        // After editing again, the buffer is dirty and emits another dirtied event.
7018 buffer1.update(cx, |buffer, cx| {
7019 assert!(buffer.text() == "aBDc");
7020 assert!(buffer.is_dirty());
7021 assert_eq!(
7022 *events.borrow(),
7023 &[
7024 language::Event::Edited,
7025 language::Event::Dirtied,
7026 language::Event::Edited,
7027 ],
7028 );
7029 events.borrow_mut().clear();
7030
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7033 buffer.edit([1..3], "", cx);
7034 assert!(buffer.text() == "ac");
7035 assert!(buffer.is_dirty());
7036 });
7037
7038 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7039
7040 // When a file is deleted, the buffer is considered dirty.
7041 let events = Rc::new(RefCell::new(Vec::new()));
7042 let buffer2 = project
7043 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
7044 .await
7045 .unwrap();
7046 buffer2.update(cx, |_, cx| {
7047 cx.subscribe(&buffer2, {
7048 let events = events.clone();
7049 move |_, _, event, _| events.borrow_mut().push(event.clone())
7050 })
7051 .detach();
7052 });
7053
7054 fs::remove_file(dir.path().join("file2")).unwrap();
7055 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7056 assert_eq!(
7057 *events.borrow(),
7058 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7059 );
7060
7061 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7062 let events = Rc::new(RefCell::new(Vec::new()));
7063 let buffer3 = project
7064 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
7065 .await
7066 .unwrap();
7067 buffer3.update(cx, |_, cx| {
7068 cx.subscribe(&buffer3, {
7069 let events = events.clone();
7070 move |_, _, event, _| events.borrow_mut().push(event.clone())
7071 })
7072 .detach();
7073 });
7074
7075 worktree.flush_fs_events(&cx).await;
7076 buffer3.update(cx, |buffer, cx| {
7077 buffer.edit(Some(0..0), "x", cx);
7078 });
7079 events.borrow_mut().clear();
7080 fs::remove_file(dir.path().join("file3")).unwrap();
7081 buffer3
7082 .condition(&cx, |_, _| !events.borrow().is_empty())
7083 .await;
7084 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7085 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7086 }
7087
7088 #[gpui::test]
7089 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7090 use std::fs;
7091
7092 let initial_contents = "aaa\nbbbbb\nc\n";
7093 let dir = temp_tree(json!({ "the-file": initial_contents }));
7094
7095 let project = Project::test(Arc::new(RealFs), cx);
7096 let (worktree, _) = project
7097 .update(cx, |p, cx| {
7098 p.find_or_create_local_worktree(dir.path(), true, cx)
7099 })
7100 .await
7101 .unwrap();
7102 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7103
7104 worktree
7105 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
7106 .await;
7107
7108 let abs_path = dir.path().join("the-file");
7109 let buffer = project
7110 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
7111 .await
7112 .unwrap();
7113
7114 // TODO
7115 // Add a cursor on each row.
7116 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
7117 // assert!(!buffer.is_dirty());
7118 // buffer.add_selection_set(
7119 // &(0..3)
7120 // .map(|row| Selection {
7121 // id: row as usize,
7122 // start: Point::new(row, 1),
7123 // end: Point::new(row, 1),
7124 // reversed: false,
7125 // goal: SelectionGoal::None,
7126 // })
7127 // .collect::<Vec<_>>(),
7128 // cx,
7129 // )
7130 // });
7131
7132 // Change the file on disk, adding two new lines of text, and removing
7133 // one line.
7134 buffer.read_with(cx, |buffer, _| {
7135 assert!(!buffer.is_dirty());
7136 assert!(!buffer.has_conflict());
7137 });
7138 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7139 fs::write(&abs_path, new_contents).unwrap();
7140
7141 // Because the buffer was not modified, it is reloaded from disk. Its
7142 // contents are edited according to the diff between the old and new
7143 // file contents.
7144 buffer
7145 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7146 .await;
7147
7148 buffer.update(cx, |buffer, _| {
7149 assert_eq!(buffer.text(), new_contents);
7150 assert!(!buffer.is_dirty());
7151 assert!(!buffer.has_conflict());
7152
7153 // TODO
7154 // let cursor_positions = buffer
7155 // .selection_set(selection_set_id)
7156 // .unwrap()
7157 // .selections::<Point>(&*buffer)
7158 // .map(|selection| {
7159 // assert_eq!(selection.start, selection.end);
7160 // selection.start
7161 // })
7162 // .collect::<Vec<_>>();
7163 // assert_eq!(
7164 // cursor_positions,
7165 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7166 // );
7167 });
7168
7169 // Modify the buffer
7170 buffer.update(cx, |buffer, cx| {
7171 buffer.edit(vec![0..0], " ", cx);
7172 assert!(buffer.is_dirty());
7173 assert!(!buffer.has_conflict());
7174 });
7175
7176 // Change the file on disk again, adding blank lines to the beginning.
7177 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7178
7179 // Because the buffer is modified, it doesn't reload from disk, but is
7180 // marked as having a conflict.
7181 buffer
7182 .condition(&cx, |buffer, _| buffer.has_conflict())
7183 .await;
7184 }
7185
7186 #[gpui::test]
7187 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7188 cx.foreground().forbid_parking();
7189
7190 let fs = FakeFs::new(cx.background());
7191 fs.insert_tree(
7192 "/the-dir",
7193 json!({
7194 "a.rs": "
7195 fn foo(mut v: Vec<usize>) {
7196 for x in &v {
7197 v.push(1);
7198 }
7199 }
7200 "
7201 .unindent(),
7202 }),
7203 )
7204 .await;
7205
7206 let project = Project::test(fs.clone(), cx);
7207 let (worktree, _) = project
7208 .update(cx, |p, cx| {
7209 p.find_or_create_local_worktree("/the-dir", true, cx)
7210 })
7211 .await
7212 .unwrap();
7213 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7214
7215 let buffer = project
7216 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7217 .await
7218 .unwrap();
7219
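        // Publish diagnostics in which primary errors and their related-information
        // hints reference each other within the same file.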
7220 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7221 let message = lsp::PublishDiagnosticsParams {
7222 uri: buffer_uri.clone(),
7223 diagnostics: vec![
7224 lsp::Diagnostic {
7225 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7226 severity: Some(DiagnosticSeverity::WARNING),
7227 message: "error 1".to_string(),
7228 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7229 location: lsp::Location {
7230 uri: buffer_uri.clone(),
7231 range: lsp::Range::new(
7232 lsp::Position::new(1, 8),
7233 lsp::Position::new(1, 9),
7234 ),
7235 },
7236 message: "error 1 hint 1".to_string(),
7237 }]),
7238 ..Default::default()
7239 },
7240 lsp::Diagnostic {
7241 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7242 severity: Some(DiagnosticSeverity::HINT),
7243 message: "error 1 hint 1".to_string(),
7244 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7245 location: lsp::Location {
7246 uri: buffer_uri.clone(),
7247 range: lsp::Range::new(
7248 lsp::Position::new(1, 8),
7249 lsp::Position::new(1, 9),
7250 ),
7251 },
7252 message: "original diagnostic".to_string(),
7253 }]),
7254 ..Default::default()
7255 },
7256 lsp::Diagnostic {
7257 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7258 severity: Some(DiagnosticSeverity::ERROR),
7259 message: "error 2".to_string(),
7260 related_information: Some(vec![
7261 lsp::DiagnosticRelatedInformation {
7262 location: lsp::Location {
7263 uri: buffer_uri.clone(),
7264 range: lsp::Range::new(
7265 lsp::Position::new(1, 13),
7266 lsp::Position::new(1, 15),
7267 ),
7268 },
7269 message: "error 2 hint 1".to_string(),
7270 },
7271 lsp::DiagnosticRelatedInformation {
7272 location: lsp::Location {
7273 uri: buffer_uri.clone(),
7274 range: lsp::Range::new(
7275 lsp::Position::new(1, 13),
7276 lsp::Position::new(1, 15),
7277 ),
7278 },
7279 message: "error 2 hint 2".to_string(),
7280 },
7281 ]),
7282 ..Default::default()
7283 },
7284 lsp::Diagnostic {
7285 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7286 severity: Some(DiagnosticSeverity::HINT),
7287 message: "error 2 hint 1".to_string(),
7288 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7289 location: lsp::Location {
7290 uri: buffer_uri.clone(),
7291 range: lsp::Range::new(
7292 lsp::Position::new(2, 8),
7293 lsp::Position::new(2, 17),
7294 ),
7295 },
7296 message: "original diagnostic".to_string(),
7297 }]),
7298 ..Default::default()
7299 },
7300 lsp::Diagnostic {
7301 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7302 severity: Some(DiagnosticSeverity::HINT),
7303 message: "error 2 hint 2".to_string(),
7304 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7305 location: lsp::Location {
7306 uri: buffer_uri.clone(),
7307 range: lsp::Range::new(
7308 lsp::Position::new(2, 8),
7309 lsp::Position::new(2, 17),
7310 ),
7311 },
7312 message: "original diagnostic".to_string(),
7313 }]),
7314 ..Default::default()
7315 },
7316 ],
7317 version: None,
7318 };
7319
7320 project
7321 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7322 .unwrap();
7323 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7324
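        // Primary diagnostics and the hints derived from their related information share
        // a group id; only the primary entry is marked `is_primary`.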
7325 assert_eq!(
7326 buffer
7327 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7328 .collect::<Vec<_>>(),
7329 &[
7330 DiagnosticEntry {
7331 range: Point::new(1, 8)..Point::new(1, 9),
7332 diagnostic: Diagnostic {
7333 severity: DiagnosticSeverity::WARNING,
7334 message: "error 1".to_string(),
7335 group_id: 0,
7336 is_primary: true,
7337 ..Default::default()
7338 }
7339 },
7340 DiagnosticEntry {
7341 range: Point::new(1, 8)..Point::new(1, 9),
7342 diagnostic: Diagnostic {
7343 severity: DiagnosticSeverity::HINT,
7344 message: "error 1 hint 1".to_string(),
7345 group_id: 0,
7346 is_primary: false,
7347 ..Default::default()
7348 }
7349 },
7350 DiagnosticEntry {
7351 range: Point::new(1, 13)..Point::new(1, 15),
7352 diagnostic: Diagnostic {
7353 severity: DiagnosticSeverity::HINT,
7354 message: "error 2 hint 1".to_string(),
7355 group_id: 1,
7356 is_primary: false,
7357 ..Default::default()
7358 }
7359 },
7360 DiagnosticEntry {
7361 range: Point::new(1, 13)..Point::new(1, 15),
7362 diagnostic: Diagnostic {
7363 severity: DiagnosticSeverity::HINT,
7364 message: "error 2 hint 2".to_string(),
7365 group_id: 1,
7366 is_primary: false,
7367 ..Default::default()
7368 }
7369 },
7370 DiagnosticEntry {
7371 range: Point::new(2, 8)..Point::new(2, 17),
7372 diagnostic: Diagnostic {
7373 severity: DiagnosticSeverity::ERROR,
7374 message: "error 2".to_string(),
7375 group_id: 1,
7376 is_primary: true,
7377 ..Default::default()
7378 }
7379 }
7380 ]
7381 );
7382
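        // Each group can also be retrieved individually by its id.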
7383 assert_eq!(
7384 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7385 &[
7386 DiagnosticEntry {
7387 range: Point::new(1, 8)..Point::new(1, 9),
7388 diagnostic: Diagnostic {
7389 severity: DiagnosticSeverity::WARNING,
7390 message: "error 1".to_string(),
7391 group_id: 0,
7392 is_primary: true,
7393 ..Default::default()
7394 }
7395 },
7396 DiagnosticEntry {
7397 range: Point::new(1, 8)..Point::new(1, 9),
7398 diagnostic: Diagnostic {
7399 severity: DiagnosticSeverity::HINT,
7400 message: "error 1 hint 1".to_string(),
7401 group_id: 0,
7402 is_primary: false,
7403 ..Default::default()
7404 }
7405 },
7406 ]
7407 );
7408 assert_eq!(
7409 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7410 &[
7411 DiagnosticEntry {
7412 range: Point::new(1, 13)..Point::new(1, 15),
7413 diagnostic: Diagnostic {
7414 severity: DiagnosticSeverity::HINT,
7415 message: "error 2 hint 1".to_string(),
7416 group_id: 1,
7417 is_primary: false,
7418 ..Default::default()
7419 }
7420 },
7421 DiagnosticEntry {
7422 range: Point::new(1, 13)..Point::new(1, 15),
7423 diagnostic: Diagnostic {
7424 severity: DiagnosticSeverity::HINT,
7425 message: "error 2 hint 2".to_string(),
7426 group_id: 1,
7427 is_primary: false,
7428 ..Default::default()
7429 }
7430 },
7431 DiagnosticEntry {
7432 range: Point::new(2, 8)..Point::new(2, 17),
7433 diagnostic: Diagnostic {
7434 severity: DiagnosticSeverity::ERROR,
7435 message: "error 2".to_string(),
7436 group_id: 1,
7437 is_primary: true,
7438 ..Default::default()
7439 }
7440 }
7441 ]
7442 );
7443 }
7444
7445 #[gpui::test]
7446 async fn test_rename(cx: &mut gpui::TestAppContext) {
7447 cx.foreground().forbid_parking();
7448
7449 let mut language = Language::new(
7450 LanguageConfig {
7451 name: "Rust".into(),
7452 path_suffixes: vec!["rs".to_string()],
7453 ..Default::default()
7454 },
7455 Some(tree_sitter_rust::language()),
7456 );
7457 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7458
7459 let fs = FakeFs::new(cx.background());
7460 fs.insert_tree(
7461 "/dir",
7462 json!({
7463 "one.rs": "const ONE: usize = 1;",
7464 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7465 }),
7466 )
7467 .await;
7468
7469 let project = Project::test(fs.clone(), cx);
7470 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7471
7472 let (tree, _) = project
7473 .update(cx, |project, cx| {
7474 project.find_or_create_local_worktree("/dir", true, cx)
7475 })
7476 .await
7477 .unwrap();
7478 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7479 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7480 .await;
7481
7482 let buffer = project
7483 .update(cx, |project, cx| {
7484 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7485 })
7486 .await
7487 .unwrap();
7488
7489 let fake_server = fake_servers.next().await.unwrap();
7490
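        // Prepare the rename: the fake server reports the range of the symbol under the cursor.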
7491 let response = project.update(cx, |project, cx| {
7492 project.prepare_rename(buffer.clone(), 7, cx)
7493 });
7494 fake_server
7495 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7496 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7497 assert_eq!(params.position, lsp::Position::new(0, 7));
7498 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7499 lsp::Position::new(0, 6),
7500 lsp::Position::new(0, 9),
7501 ))))
7502 })
7503 .next()
7504 .await
7505 .unwrap();
7506 let range = response.await.unwrap().unwrap();
7507 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7508 assert_eq!(range, 6..9);
7509
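        // Perform the rename: the server's workspace edit spans two files, and the result
        // is returned as a project transaction with one entry per affected buffer.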
7510 let response = project.update(cx, |project, cx| {
7511 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7512 });
7513 fake_server
7514 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7515 assert_eq!(
7516 params.text_document_position.text_document.uri.as_str(),
7517 "file:///dir/one.rs"
7518 );
7519 assert_eq!(
7520 params.text_document_position.position,
7521 lsp::Position::new(0, 7)
7522 );
7523 assert_eq!(params.new_name, "THREE");
7524 Ok(Some(lsp::WorkspaceEdit {
7525 changes: Some(
7526 [
7527 (
7528 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7529 vec![lsp::TextEdit::new(
7530 lsp::Range::new(
7531 lsp::Position::new(0, 6),
7532 lsp::Position::new(0, 9),
7533 ),
7534 "THREE".to_string(),
7535 )],
7536 ),
7537 (
7538 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7539 vec![
7540 lsp::TextEdit::new(
7541 lsp::Range::new(
7542 lsp::Position::new(0, 24),
7543 lsp::Position::new(0, 27),
7544 ),
7545 "THREE".to_string(),
7546 ),
7547 lsp::TextEdit::new(
7548 lsp::Range::new(
7549 lsp::Position::new(0, 35),
7550 lsp::Position::new(0, 38),
7551 ),
7552 "THREE".to_string(),
7553 ),
7554 ],
7555 ),
7556 ]
7557 .into_iter()
7558 .collect(),
7559 ),
7560 ..Default::default()
7561 }))
7562 })
7563 .next()
7564 .await
7565 .unwrap();
7566 let mut transaction = response.await.unwrap().0;
7567 assert_eq!(transaction.len(), 2);
7568 assert_eq!(
7569 transaction
7570 .remove_entry(&buffer)
7571 .unwrap()
7572 .0
7573 .read_with(cx, |buffer, _| buffer.text()),
7574 "const THREE: usize = 1;"
7575 );
7576 assert_eq!(
7577 transaction
7578 .into_keys()
7579 .next()
7580 .unwrap()
7581 .read_with(cx, |buffer, _| buffer.text()),
7582 "const TWO: usize = one::THREE + one::THREE;"
7583 );
7584 }
7585
7586 #[gpui::test]
7587 async fn test_search(cx: &mut gpui::TestAppContext) {
7588 let fs = FakeFs::new(cx.background());
7589 fs.insert_tree(
7590 "/dir",
7591 json!({
7592 "one.rs": "const ONE: usize = 1;",
7593 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7594 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7595 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7596 }),
7597 )
7598 .await;
7599 let project = Project::test(fs.clone(), cx);
7600 let (tree, _) = project
7601 .update(cx, |project, cx| {
7602 project.find_or_create_local_worktree("/dir", true, cx)
7603 })
7604 .await
7605 .unwrap();
7606 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7607 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7608 .await;
7609
7610 assert_eq!(
7611 search(&project, SearchQuery::text("TWO", false, true), cx)
7612 .await
7613 .unwrap(),
7614 HashMap::from_iter([
7615 ("two.rs".to_string(), vec![6..9]),
7616 ("three.rs".to_string(), vec![37..40])
7617 ])
7618 );
7619
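        // Open one of the files and edit it so that it now contains matches; the search
        // should reflect the unsaved buffer contents rather than the file on disk.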
7620 let buffer_4 = project
7621 .update(cx, |project, cx| {
7622 project.open_buffer((worktree_id, "four.rs"), cx)
7623 })
7624 .await
7625 .unwrap();
7626 buffer_4.update(cx, |buffer, cx| {
7627 buffer.edit([20..28, 31..43], "two::TWO", cx);
7628 });
7629
7630 assert_eq!(
7631 search(&project, SearchQuery::text("TWO", false, true), cx)
7632 .await
7633 .unwrap(),
7634 HashMap::from_iter([
7635 ("two.rs".to_string(), vec![6..9]),
7636 ("three.rs".to_string(), vec![37..40]),
7637 ("four.rs".to_string(), vec![25..28, 36..39])
7638 ])
7639 );
7640
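        // Test helper: run a project-wide search and collect the matching offset ranges,
        // keyed by each buffer's file path.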
7641 async fn search(
7642 project: &ModelHandle<Project>,
7643 query: SearchQuery,
7644 cx: &mut gpui::TestAppContext,
7645 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7646 let results = project
7647 .update(cx, |project, cx| project.search(query, cx))
7648 .await?;
7649
7650 Ok(results
7651 .into_iter()
7652 .map(|(buffer, ranges)| {
7653 buffer.read_with(cx, |buffer, _| {
7654 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7655 let ranges = ranges
7656 .into_iter()
7657 .map(|range| range.to_offset(buffer))
7658 .collect::<Vec<_>>();
7659 (path, ranges)
7660 })
7661 })
7662 .collect())
7663 }
7664 }
7665}