1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use util::{post_inc, ResultExt, TryFutureExt as _};
53
54pub use fs::*;
55pub use worktree::*;
56
/// An entity that can be associated with a project entry (e.g. a buffer).
pub trait Item: Entity {
    /// Returns the id of the project entry backing this item, or `None` if
    /// the item is not backed by an entry in any worktree.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
60
/// A set of worktrees plus the collaborative state layered on top of them:
/// open buffers, running language servers, collaborators, and the RPC client
/// used to share the project or join a remote one.
pub struct Project {
    // Worktree handles; weak handles are used for trees that may be dropped
    // (see `WorktreeHandle`).
    worktrees: Vec<WorktreeHandle>,
    // The entry most recently activated in the UI, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Fully-started language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight language-server startup tasks for the same keys.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Status (name, pending work, diagnostics) per language-server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    // Most recent workspace edit applied on behalf of each language server.
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is local or a joined remote project.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Sender/receiver pair used to signal whenever any buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids that have been sent to each remote peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // In-flight buffer loads, keyed by path, so concurrent opens of the same
    // path share one load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree creations, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All buffers that have been opened, keyed by remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Historical snapshots per buffer id, tagged by version, used when
    // translating LSP positions against older buffer states.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value used to avoid colliding with ourselves in tests/telemetry.
    nonce: u128,
}
92
/// The project's handle on an open buffer.
enum OpenBuffer {
    /// Keeps the buffer alive (used while the project is shared).
    Strong(ModelHandle<Buffer>),
    /// Lets the buffer be dropped when no one else holds it.
    Weak(WeakModelHandle<Buffer>),
    /// Buffer is still being created; operations received in the meantime
    /// are queued here and applied once it exists.
    Loading(Vec<Operation>),
}
98
/// The project's handle on a worktree: strong while the tree must be kept
/// alive (visible or shared), weak otherwise.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
103
/// Whether this project lives on this machine or was joined from a remote
/// peer, along with the state each mode needs.
enum ProjectClientState {
    Local {
        /// True once the project has been shared with collaborators.
        is_shared: bool,
        /// Server-assigned project id; `None` while unregistered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that re-registers the project whenever the client
        /// connection status changes.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set when the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Background task that watches the connection and flags the project
        /// as unshared on disconnect.
        _detect_unshare_task: Task<Option<()>>,
    },
}
118
/// Another participant collaborating on this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
125
/// Events emitted by a `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
137
/// Summarized state of one running language server, for display.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    /// In-progress work items, keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    /// Outstanding disk-based diagnostic updates (may be driven negative
    /// transiently by out-of-order updates, hence `isize`).
    pub pending_diagnostic_updates: isize,
}
144
/// A single LSP work-progress report.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // `Instant` is not serializable and only meaningful in-process.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
152
/// A path to a file, addressed relative to one of the project's worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
158
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
164
/// A range within a specific buffer (e.g. a definition or reference site).
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
170
/// An LSP document highlight: a range plus its kind (read/write/text).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
176
/// A workspace symbol reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// Worktree whose language server produced the symbol.
    pub source_worktree_id: WorktreeId,
    /// Worktree containing the symbol's file (may differ from the source).
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    /// SHA-256 over the symbol's identifying fields, used to verify
    /// requests that reference this symbol.
    pub signature: [u8; 32],
}
189
/// A group of buffer transactions, one per affected buffer, produced by a
/// project-wide operation (e.g. a workspace edit).
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 };
199
200 for entry in diagnostics {
201 if entry.diagnostic.is_primary {
202 match entry.diagnostic.severity {
203 DiagnosticSeverity::ERROR => this.error_count += 1,
204 DiagnosticSeverity::WARNING => this.warning_count += 1,
205 _ => {}
206 }
207 }
208 }
209
210 this
211 }
212
213 pub fn is_empty(&self) -> bool {
214 self.error_count == 0 && self.warning_count == 0
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 }
223 }
224}
225
/// A project-wide identifier for an entry in a worktree.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id holding the largest representable value.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id from its wire representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts this id into its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw integer value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
248
249impl Project {
    /// Registers all RPC message and request handlers through which a
    /// `Project` model responds to collaboration traffic from the server.
    /// Must be called once at client startup, before any project is shared
    /// or joined.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, parameterized by command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
283
    /// Creates a new, empty local project backed by `fs`, with no worktrees.
    /// Spawns a background task that registers the project with the server
    /// whenever the client connects and unregisters it on disconnect.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Keep the project's server-side registration in sync with the
            // client's connection status. Holds only a weak handle so the
            // task does not keep the project alive.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random per-instance value (see `nonce` field).
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
346
    /// Joins the remote project with the given server-side id, returning a
    /// `Project` in `Remote` state. Connects and authenticates, requests to
    /// join, reconstructs the host's worktrees and language-server statuses
    /// from the response, then loads the collaborators' user records.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Build remote worktrees from the snapshot in the response; each
        // returns a task that finishes loading it, detached to run freely.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                // Route incoming messages addressed to this remote entity id
                // to this model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection and mark the project unshared once
                    // the connection is lost (or was never established).
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed language-server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch user records for all collaborators before constructing them,
        // so `Collaborator::from_proto` can resolve each user id.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
458
    /// Test helper: builds a local project over `fs` with a fake HTTP client,
    /// adds a worktree for each of `root_paths`, and waits for each tree's
    /// initial scan to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        // All HTTP requests fail with 404; tests never reach the network.
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has finished scanning its files.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
482
483 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
484 self.opened_buffers
485 .get(&remote_id)
486 .and_then(|buffer| buffer.upgrade(cx))
487 }
488
    /// Returns the project's language registry.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
492
    /// Test helper: panics if the project violates its internal invariants.
    /// For local projects, every worktree must have a unique absolute path;
    /// for remote projects, no open buffer may have deferred (unapplied)
    /// operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
526
527 #[cfg(any(test, feature = "test-support"))]
528 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
529 let path = path.into();
530 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
531 self.opened_buffers.iter().any(|(_, buffer)| {
532 if let Some(buffer) = buffer.upgrade(cx) {
533 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
534 if file.worktree == worktree && file.path() == &path.path {
535 return true;
536 }
537 }
538 }
539 false
540 })
541 } else {
542 false
543 }
544 }
545
    /// Returns the filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
549
    /// Removes this project's registration with the server: stops sharing,
    /// unregisters every worktree, clears the remote id, and drops all RPC
    /// subscriptions. Only meaningful for local projects.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
566
    /// Registers this local project with the server: obtains a fresh project
    /// id, publishes it via the remote-id watch, subscribes to incoming
    /// messages for that id, and registers every worktree. Any previous
    /// registration is torn down first.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // Wait for all worktree registrations; fail on the first error.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
599
    /// Returns the server-assigned project id, or `None` if this local
    /// project has not been registered yet.
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
606
    /// Returns a future resolving to the project's remote id, waiting for a
    /// local project to be registered if necessary. For remote projects the
    /// id is already known and the future resolves immediately.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id is assigned.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
629
630 pub fn replica_id(&self) -> ReplicaId {
631 match &self.client_state {
632 ProjectClientState::Local { .. } => 0,
633 ProjectClientState::Remote { replica_id, .. } => *replica_id,
634 }
635 }
636
    /// Returns the current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
640
    /// Iterates over all live worktrees, skipping any whose weak handles can
    /// no longer be upgraded.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
649
650 pub fn visible_worktrees<'a>(
651 &'a self,
652 cx: &'a AppContext,
653 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
654 self.worktrees.iter().filter_map(|worktree| {
655 worktree.upgrade(cx).and_then(|worktree| {
656 if worktree.read(cx).is_visible() {
657 Some(worktree)
658 } else {
659 None
660 }
661 })
662 })
663 }
664
665 pub fn worktree_for_id(
666 &self,
667 id: WorktreeId,
668 cx: &AppContext,
669 ) -> Option<ModelHandle<Worktree>> {
670 self.worktrees(cx)
671 .find(|worktree| worktree.read(cx).id() == id)
672 }
673
674 pub fn worktree_for_entry(
675 &self,
676 entry_id: ProjectEntryId,
677 cx: &AppContext,
678 ) -> Option<ModelHandle<Worktree>> {
679 self.worktrees(cx)
680 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
681 }
682
683 pub fn worktree_id_for_entry(
684 &self,
685 entry_id: ProjectEntryId,
686 cx: &AppContext,
687 ) -> Option<WorktreeId> {
688 self.worktree_for_entry(entry_id, cx)
689 .map(|worktree| worktree.read(cx).id())
690 }
691
    /// Creates an empty file at the given project path. For local projects
    /// the file is written directly through the worktree; for remote projects
    /// a `CreateProjectEntry` request is sent to the host and the resulting
    /// entry is inserted into the local snapshot. Returns `None` if the path's
    /// worktree does not exist.
    pub fn create_file(
        &mut self,
        project_path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;

        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                // Write an empty file at the path.
                worktree.as_local_mut().unwrap().write_file(
                    project_path.path,
                    Default::default(),
                    cx,
                )
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        // Paths are sent as raw bytes (unix-only: OsStrExt).
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory: false,
                    })
                    .await?;
                // Insert the host's entry into our remote snapshot.
                worktree.update(&mut cx, |worktree, _| {
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.snapshot.insert_entry(
                        response
                            .entry
                            .ok_or_else(|| anyhow!("missing entry in response"))?,
                    )
                })
            }))
        }
    }
732
    /// Renames the given entry to `new_path` within its worktree. Returns
    /// `None` if no worktree contains the entry.
    ///
    /// NOTE(review): the remote-project path is unimplemented (`todo!()`)
    /// and will panic if reached — callers must only invoke this on local
    /// projects until remote support lands.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        if self.is_local() {
            let worktree = self.worktree_for_entry(entry_id, cx)?;

            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            todo!()
        }
    }
752
    /// A project can be shared only when it is local and has at least one
    /// visible worktree.
    pub fn can_share(&self, cx: &AppContext) -> bool {
        self.is_local() && self.visible_worktrees(cx).next().is_some()
    }
756
    /// Shares this local project with collaborators: marks it shared,
    /// upgrades all weak buffer and worktree handles to strong ones (so they
    /// stay alive while shared), sends `ShareProject`, then shares each
    /// worktree. Fails if the project has no id yet or is a remote project.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Pin all open buffers for the duration of the share.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Pin all worktrees likewise.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share each worktree and wait for all of them to finish.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
818
    /// Stops sharing this local project: clears collaborators and shared
    /// buffers, unshares every worktree (downgrading invisible ones to weak
    /// handles), downgrades buffer handles so they can be dropped again, and
    /// notifies the server. No-op if the project is not currently shared;
    /// logs an error if called on a remote project.
    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
        let rpc = self.client.clone();

        if let ProjectClientState::Local {
            is_shared,
            remote_id_rx,
            ..
        } = &mut self.client_state
        {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    // Visible worktrees stay strongly held; others may drop.
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            // Buffers no longer need to be pinned once unshared.
            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            // Only inform the server if the project was ever registered.
            if let Some(project_id) = *remote_id_rx.borrow() {
                rpc.send(proto::UnshareProject { project_id }).log_err();
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }
865
    /// Called on a remote project when the host stops sharing (or the
    /// connection drops): marks it read-only and clears collaborators.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
877
878 pub fn is_read_only(&self) -> bool {
879 match &self.client_state {
880 ProjectClientState::Local { .. } => false,
881 ProjectClientState::Remote {
882 sharing_has_stopped,
883 ..
884 } => *sharing_has_stopped,
885 }
886 }
887
888 pub fn is_local(&self) -> bool {
889 match &self.client_state {
890 ProjectClientState::Local { .. } => true,
891 ProjectClientState::Remote { .. } => false,
892 }
893 }
894
895 pub fn is_remote(&self) -> bool {
896 !self.is_local()
897 }
898
    /// Creates a new in-memory buffer with the given text and language
    /// (plain text if `None`) and registers it with the project. Errors on
    /// remote projects, where guest-created buffers aren't supported yet.
    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
916
    /// Opens the buffer at `path` and returns it alongside the project entry
    /// id of its file. Fails if the buffer's file has no project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
933
934 pub fn open_local_buffer(
935 &mut self,
936 abs_path: impl AsRef<Path>,
937 cx: &mut ModelContext<Self>,
938 ) -> Task<Result<ModelHandle<Buffer>>> {
939 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
940 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
941 } else {
942 Task::ready(Err(anyhow!("no such path")))
943 }
944 }
945
    /// Opens the buffer at the given project path, deduplicating concurrent
    /// requests: an already-open buffer is returned immediately, an
    /// in-flight load is awaited and shared, and otherwise a new load is
    /// started (local or remote depending on the worktree).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to every waiter via the watch.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch yields the load result (shared by all callers).
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1006
    /// Loads a buffer from disk via the local worktree and registers it with
    /// the project.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
1023
    /// Requests a buffer from the host by path and deserializes it into a
    /// local replica.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Only called for remote projects, which always have an id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
1048
    /// Opens a buffer for a file URI reported by a language server. If the
    /// file lies outside every existing worktree, a new (invisible) worktree
    /// is created for it and associated with the given language server so
    /// the server keeps handling that file.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create a hidden one rooted
                // at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The worktree root *is* the file, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1087
1088 pub fn open_buffer_by_id(
1089 &mut self,
1090 id: u64,
1091 cx: &mut ModelContext<Self>,
1092 ) -> Task<Result<ModelHandle<Buffer>>> {
1093 if let Some(buffer) = self.buffer_for_id(id, cx) {
1094 Task::ready(Ok(buffer))
1095 } else if self.is_local() {
1096 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1097 } else if let Some(project_id) = self.remote_id() {
1098 let request = self
1099 .client
1100 .request(proto::OpenBufferById { project_id, id });
1101 cx.spawn(|this, mut cx| async move {
1102 let buffer = request
1103 .await?
1104 .buffer
1105 .ok_or_else(|| anyhow!("invalid buffer"))?;
1106 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1107 .await
1108 })
1109 } else {
1110 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1111 }
1112 }
1113
1114 pub fn save_buffer_as(
1115 &mut self,
1116 buffer: ModelHandle<Buffer>,
1117 abs_path: PathBuf,
1118 cx: &mut ModelContext<Project>,
1119 ) -> Task<Result<()>> {
1120 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1121 let old_path =
1122 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1123 cx.spawn(|this, mut cx| async move {
1124 if let Some(old_path) = old_path {
1125 this.update(&mut cx, |this, cx| {
1126 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1127 });
1128 }
1129 let (worktree, path) = worktree_task.await?;
1130 worktree
1131 .update(&mut cx, |worktree, cx| {
1132 worktree
1133 .as_local_mut()
1134 .unwrap()
1135 .save_buffer_as(buffer.clone(), path, cx)
1136 })
1137 .await?;
1138 this.update(&mut cx, |this, cx| {
1139 this.assign_language_to_buffer(&buffer, cx);
1140 this.register_buffer_with_language_server(&buffer, cx);
1141 });
1142 Ok(())
1143 })
1144 }
1145
1146 pub fn get_open_buffer(
1147 &mut self,
1148 path: &ProjectPath,
1149 cx: &mut ModelContext<Self>,
1150 ) -> Option<ModelHandle<Buffer>> {
1151 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1152 self.opened_buffers.values().find_map(|buffer| {
1153 let buffer = buffer.upgrade(cx)?;
1154 let file = File::from_dyn(buffer.read(cx).file())?;
1155 if file.worktree == worktree && file.path() == &path.path {
1156 Some(buffer)
1157 } else {
1158 None
1159 }
1160 })
1161 }
1162
1163 fn register_buffer(
1164 &mut self,
1165 buffer: &ModelHandle<Buffer>,
1166 cx: &mut ModelContext<Self>,
1167 ) -> Result<()> {
1168 let remote_id = buffer.read(cx).remote_id();
1169 let open_buffer = if self.is_remote() || self.is_shared() {
1170 OpenBuffer::Strong(buffer.clone())
1171 } else {
1172 OpenBuffer::Weak(buffer.downgrade())
1173 };
1174
1175 match self.opened_buffers.insert(remote_id, open_buffer) {
1176 None => {}
1177 Some(OpenBuffer::Loading(operations)) => {
1178 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1179 }
1180 Some(OpenBuffer::Weak(existing_handle)) => {
1181 if existing_handle.upgrade(cx).is_some() {
1182 Err(anyhow!(
1183 "already registered buffer with remote id {}",
1184 remote_id
1185 ))?
1186 }
1187 }
1188 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1189 "already registered buffer with remote id {}",
1190 remote_id
1191 ))?,
1192 }
1193 cx.subscribe(buffer, |this, buffer, event, cx| {
1194 this.on_buffer_event(buffer, event, cx);
1195 })
1196 .detach();
1197
1198 self.assign_language_to_buffer(buffer, cx);
1199 self.register_buffer_with_language_server(buffer, cx);
1200 cx.observe_release(buffer, |this, buffer, cx| {
1201 if let Some(file) = File::from_dyn(buffer.file()) {
1202 if file.is_local() {
1203 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1204 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1205 server
1206 .notify::<lsp::notification::DidCloseTextDocument>(
1207 lsp::DidCloseTextDocumentParams {
1208 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1209 },
1210 )
1211 .log_err();
1212 }
1213 }
1214 }
1215 })
1216 .detach();
1217
1218 Ok(())
1219 }
1220
1221 fn register_buffer_with_language_server(
1222 &mut self,
1223 buffer_handle: &ModelHandle<Buffer>,
1224 cx: &mut ModelContext<Self>,
1225 ) {
1226 let buffer = buffer_handle.read(cx);
1227 let buffer_id = buffer.remote_id();
1228 if let Some(file) = File::from_dyn(buffer.file()) {
1229 if file.is_local() {
1230 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1231 let initial_snapshot = buffer.text_snapshot();
1232
1233 let mut language_server = None;
1234 let mut language_id = None;
1235 if let Some(language) = buffer.language() {
1236 let worktree_id = file.worktree_id(cx);
1237 if let Some(adapter) = language.lsp_adapter() {
1238 language_id = adapter.id_for_language(language.name().as_ref());
1239 language_server = self
1240 .language_servers
1241 .get(&(worktree_id, adapter.name()))
1242 .cloned();
1243 }
1244 }
1245
1246 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1247 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1248 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1249 .log_err();
1250 }
1251 }
1252
1253 if let Some((_, server)) = language_server {
1254 server
1255 .notify::<lsp::notification::DidOpenTextDocument>(
1256 lsp::DidOpenTextDocumentParams {
1257 text_document: lsp::TextDocumentItem::new(
1258 uri,
1259 language_id.unwrap_or_default(),
1260 0,
1261 initial_snapshot.text(),
1262 ),
1263 }
1264 .clone(),
1265 )
1266 .log_err();
1267 buffer_handle.update(cx, |buffer, cx| {
1268 buffer.set_completion_triggers(
1269 server
1270 .capabilities()
1271 .completion_provider
1272 .as_ref()
1273 .and_then(|provider| provider.trigger_characters.clone())
1274 .unwrap_or(Vec::new()),
1275 cx,
1276 )
1277 });
1278 self.buffer_snapshots
1279 .insert(buffer_id, vec![(0, initial_snapshot)]);
1280 }
1281 }
1282 }
1283 }
1284
1285 fn unregister_buffer_from_language_server(
1286 &mut self,
1287 buffer: &ModelHandle<Buffer>,
1288 old_path: PathBuf,
1289 cx: &mut ModelContext<Self>,
1290 ) {
1291 buffer.update(cx, |buffer, cx| {
1292 buffer.update_diagnostics(Default::default(), cx);
1293 self.buffer_snapshots.remove(&buffer.remote_id());
1294 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1295 language_server
1296 .notify::<lsp::notification::DidCloseTextDocument>(
1297 lsp::DidCloseTextDocumentParams {
1298 text_document: lsp::TextDocumentIdentifier::new(
1299 lsp::Url::from_file_path(old_path).unwrap(),
1300 ),
1301 },
1302 )
1303 .log_err();
1304 }
1305 });
1306 }
1307
    /// Responds to an event emitted by one of this project's buffers.
    ///
    /// - `Operation`: forwards the CRDT operation to collaborators over RPC.
    /// - `Edited`: computes the incremental changes since the last snapshot
    ///   sent to the buffer's language server and pushes a
    ///   `textDocument/didChange`, bumping the document version.
    /// - `Saved`: notifies every language server in the buffer's worktree
    ///   with `textDocument/didSave`.
    ///
    /// The `Option<()>` return only supports early exit via `?` when a
    /// prerequisite is missing (no remote id, no server, no file, no
    /// recorded snapshot); the value itself is unused by callers here.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Translate each edit since the previously-sent snapshot into
                // an LSP incremental change: the replaced range plus its new
                // text. The end point is derived from the old range's length.
                // NOTE(review): this assumes `edits_since` yields edits whose
                // coordinates compose the way LSP applies changes
                // sequentially — confirm against `edits_since` semantics.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot we are about to describe so future
                // edits are diffed against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Every server in the worktree is told about the save, not
                // just the buffer's primary server.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1394
1395 fn language_servers_for_worktree(
1396 &self,
1397 worktree_id: WorktreeId,
1398 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1399 self.language_servers.iter().filter_map(
1400 move |((language_server_worktree_id, _), server)| {
1401 if *language_server_worktree_id == worktree_id {
1402 Some(server)
1403 } else {
1404 None
1405 }
1406 },
1407 )
1408 }
1409
1410 fn assign_language_to_buffer(
1411 &mut self,
1412 buffer: &ModelHandle<Buffer>,
1413 cx: &mut ModelContext<Self>,
1414 ) -> Option<()> {
1415 // If the buffer has a language, set it and start the language server if we haven't already.
1416 let full_path = buffer.read(cx).file()?.full_path(cx);
1417 let language = self.languages.select_language(&full_path)?;
1418 buffer.update(cx, |buffer, cx| {
1419 buffer.set_language(Some(language.clone()), cx);
1420 });
1421
1422 let file = File::from_dyn(buffer.read(cx).file())?;
1423 let worktree = file.worktree.read(cx).as_local()?;
1424 let worktree_id = worktree.id();
1425 let worktree_abs_path = worktree.abs_path().clone();
1426 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1427
1428 None
1429 }
1430
    /// Ensures a language server is running for `language` in the given
    /// worktree, starting one if necessary.
    ///
    /// The `started_language_servers` map doubles as a de-duplication guard:
    /// if a server for `(worktree, adapter)` is already starting or running,
    /// this is a no-op. The stored value is the spawned task that resolves to
    /// the initialized server (or `None` if startup failed at any step).
    ///
    /// Once the server is initialized, this installs its notification and
    /// request handlers, records its status, sends the current configuration,
    /// announces it to collaborators, and opens every already-open buffer
    /// that matches the worktree and language.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        // Languages without an LSP adapter have no server to start.
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                // Kick off launching the server binary (possibly downloading
                // it first); this yields a future resolved below.
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    // Bail out if the project was dropped while starting up.
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route `textDocument/publishDiagnostics` notifications
                    // into the project's diagnostics state.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // current language-server settings, honoring the
                    // requested section when one is given.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated `workspace/applyEdit` requests.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Track `$/progress` notifications (work-done progress
                    // and disk-based diagnostics status).
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Record the running server and a fresh status entry.
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current configuration before any documents
                        // are opened.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Let collaborators know about the new server so they
                        // can display its status.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Reuse (or create) the snapshot history so
                                // the didOpen version matches what later
                                // incremental changes will diff against.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1643
1644 pub fn restart_language_servers_for_buffers(
1645 &mut self,
1646 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1647 cx: &mut ModelContext<Self>,
1648 ) -> Option<()> {
1649 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1650 .into_iter()
1651 .filter_map(|buffer| {
1652 let file = File::from_dyn(buffer.read(cx).file())?;
1653 let worktree = file.worktree.read(cx).as_local()?;
1654 let worktree_id = worktree.id();
1655 let worktree_abs_path = worktree.abs_path().clone();
1656 let full_path = file.full_path(cx);
1657 Some((worktree_id, worktree_abs_path, full_path))
1658 })
1659 .collect();
1660 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1661 let language = self.languages.select_language(&full_path)?;
1662 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1663 }
1664
1665 None
1666 }
1667
1668 fn restart_language_server(
1669 &mut self,
1670 worktree_id: WorktreeId,
1671 worktree_path: Arc<Path>,
1672 language: Arc<Language>,
1673 cx: &mut ModelContext<Self>,
1674 ) {
1675 let adapter = if let Some(adapter) = language.lsp_adapter() {
1676 adapter
1677 } else {
1678 return;
1679 };
1680 let key = (worktree_id, adapter.name());
1681 let server_to_shutdown = self.language_servers.remove(&key);
1682 self.started_language_servers.remove(&key);
1683 server_to_shutdown
1684 .as_ref()
1685 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1686 cx.spawn_weak(|this, mut cx| async move {
1687 if let Some(this) = this.upgrade(&cx) {
1688 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1689 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1690 shutdown_task.await;
1691 }
1692 }
1693
1694 this.update(&mut cx, |this, cx| {
1695 this.start_language_server(worktree_id, worktree_path, language, cx);
1696 });
1697 }
1698 })
1699 .detach();
1700 }
1701
1702 fn on_lsp_diagnostics_published(
1703 &mut self,
1704 server_id: usize,
1705 mut params: lsp::PublishDiagnosticsParams,
1706 adapter: &Arc<dyn LspAdapter>,
1707 disk_based_diagnostics_progress_token: Option<&str>,
1708 cx: &mut ModelContext<Self>,
1709 ) {
1710 adapter.process_diagnostics(&mut params);
1711 if disk_based_diagnostics_progress_token.is_none() {
1712 self.disk_based_diagnostics_started(cx);
1713 self.broadcast_language_server_update(
1714 server_id,
1715 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1716 proto::LspDiskBasedDiagnosticsUpdating {},
1717 ),
1718 );
1719 }
1720 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1721 .log_err();
1722 if disk_based_diagnostics_progress_token.is_none() {
1723 self.disk_based_diagnostics_finished(cx);
1724 self.broadcast_language_server_update(
1725 server_id,
1726 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1727 proto::LspDiskBasedDiagnosticsUpdated {},
1728 ),
1729 );
1730 }
1731 }
1732
    /// Tracks a `$/progress` notification from a language server.
    ///
    /// Progress reported under the adapter's disk-based diagnostics token is
    /// translated into diagnostics started/finished events, counted so that
    /// overlapping Begin/End pairs only fire on the first Begin and the last
    /// End. All other string tokens become generic work items mirrored to
    /// collaborators. Numeric tokens are ignored.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        // `ProgressParamsValue` currently has a single variant.
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Ignore progress from servers we no longer track.
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only the first of overlapping updates fires the event.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Intermediate reports on the diagnostics token are ignored;
                // only Begin/End matter for the diagnostics status.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // NOTE(review): an End without a matching Begin would
                    // underflow this counter — assumed not to happen; confirm
                    // against server behavior.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
1827
1828 fn on_lsp_work_start(
1829 &mut self,
1830 language_server_id: usize,
1831 token: String,
1832 cx: &mut ModelContext<Self>,
1833 ) {
1834 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1835 status.pending_work.insert(
1836 token,
1837 LanguageServerProgress {
1838 message: None,
1839 percentage: None,
1840 last_update_at: Instant::now(),
1841 },
1842 );
1843 cx.notify();
1844 }
1845 }
1846
1847 fn on_lsp_work_progress(
1848 &mut self,
1849 language_server_id: usize,
1850 token: String,
1851 progress: LanguageServerProgress,
1852 cx: &mut ModelContext<Self>,
1853 ) {
1854 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1855 status.pending_work.insert(token, progress);
1856 cx.notify();
1857 }
1858 }
1859
1860 fn on_lsp_work_end(
1861 &mut self,
1862 language_server_id: usize,
1863 token: String,
1864 cx: &mut ModelContext<Self>,
1865 ) {
1866 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1867 status.pending_work.remove(&token);
1868 cx.notify();
1869 }
1870 }
1871
1872 async fn on_lsp_workspace_edit(
1873 this: WeakModelHandle<Self>,
1874 params: lsp::ApplyWorkspaceEditParams,
1875 server_id: usize,
1876 adapter: Arc<dyn LspAdapter>,
1877 language_server: Arc<LanguageServer>,
1878 mut cx: AsyncAppContext,
1879 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1880 let this = this
1881 .upgrade(&cx)
1882 .ok_or_else(|| anyhow!("project project closed"))?;
1883 let transaction = Self::deserialize_workspace_edit(
1884 this.clone(),
1885 params.edit,
1886 true,
1887 adapter.clone(),
1888 language_server.clone(),
1889 &mut cx,
1890 )
1891 .await
1892 .log_err();
1893 this.update(&mut cx, |this, _| {
1894 if let Some(transaction) = transaction {
1895 this.last_workspace_edits_by_language_server
1896 .insert(server_id, transaction);
1897 }
1898 });
1899 Ok(lsp::ApplyWorkspaceEditResponse {
1900 applied: true,
1901 failed_change: None,
1902 failure_reason: None,
1903 })
1904 }
1905
1906 fn broadcast_language_server_update(
1907 &self,
1908 language_server_id: usize,
1909 event: proto::update_language_server::Variant,
1910 ) {
1911 if let Some(project_id) = self.remote_id() {
1912 self.client
1913 .send(proto::UpdateLanguageServer {
1914 project_id,
1915 language_server_id: language_server_id as u64,
1916 variant: Some(event),
1917 })
1918 .log_err();
1919 }
1920 }
1921
1922 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1923 for (_, server) in self.language_servers.values() {
1924 server
1925 .notify::<lsp::notification::DidChangeConfiguration>(
1926 lsp::DidChangeConfigurationParams {
1927 settings: settings.clone(),
1928 },
1929 )
1930 .ok();
1931 }
1932 *self.language_server_settings.lock() = settings;
1933 }
1934
1935 pub fn language_server_statuses(
1936 &self,
1937 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1938 self.language_server_statuses.values()
1939 }
1940
1941 pub fn update_diagnostics(
1942 &mut self,
1943 params: lsp::PublishDiagnosticsParams,
1944 disk_based_sources: &[&str],
1945 cx: &mut ModelContext<Self>,
1946 ) -> Result<()> {
1947 let abs_path = params
1948 .uri
1949 .to_file_path()
1950 .map_err(|_| anyhow!("URI is not a file"))?;
1951 let mut next_group_id = 0;
1952 let mut diagnostics = Vec::default();
1953 let mut primary_diagnostic_group_ids = HashMap::default();
1954 let mut sources_by_group_id = HashMap::default();
1955 let mut supporting_diagnostics = HashMap::default();
1956 for diagnostic in ¶ms.diagnostics {
1957 let source = diagnostic.source.as_ref();
1958 let code = diagnostic.code.as_ref().map(|code| match code {
1959 lsp::NumberOrString::Number(code) => code.to_string(),
1960 lsp::NumberOrString::String(code) => code.clone(),
1961 });
1962 let range = range_from_lsp(diagnostic.range);
1963 let is_supporting = diagnostic
1964 .related_information
1965 .as_ref()
1966 .map_or(false, |infos| {
1967 infos.iter().any(|info| {
1968 primary_diagnostic_group_ids.contains_key(&(
1969 source,
1970 code.clone(),
1971 range_from_lsp(info.location.range),
1972 ))
1973 })
1974 });
1975
1976 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1977 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1978 });
1979
1980 if is_supporting {
1981 supporting_diagnostics.insert(
1982 (source, code.clone(), range),
1983 (diagnostic.severity, is_unnecessary),
1984 );
1985 } else {
1986 let group_id = post_inc(&mut next_group_id);
1987 let is_disk_based = source.map_or(false, |source| {
1988 disk_based_sources.contains(&source.as_str())
1989 });
1990
1991 sources_by_group_id.insert(group_id, source);
1992 primary_diagnostic_group_ids
1993 .insert((source, code.clone(), range.clone()), group_id);
1994
1995 diagnostics.push(DiagnosticEntry {
1996 range,
1997 diagnostic: Diagnostic {
1998 code: code.clone(),
1999 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2000 message: diagnostic.message.clone(),
2001 group_id,
2002 is_primary: true,
2003 is_valid: true,
2004 is_disk_based,
2005 is_unnecessary,
2006 },
2007 });
2008 if let Some(infos) = &diagnostic.related_information {
2009 for info in infos {
2010 if info.location.uri == params.uri && !info.message.is_empty() {
2011 let range = range_from_lsp(info.location.range);
2012 diagnostics.push(DiagnosticEntry {
2013 range,
2014 diagnostic: Diagnostic {
2015 code: code.clone(),
2016 severity: DiagnosticSeverity::INFORMATION,
2017 message: info.message.clone(),
2018 group_id,
2019 is_primary: false,
2020 is_valid: true,
2021 is_disk_based,
2022 is_unnecessary: false,
2023 },
2024 });
2025 }
2026 }
2027 }
2028 }
2029 }
2030
2031 for entry in &mut diagnostics {
2032 let diagnostic = &mut entry.diagnostic;
2033 if !diagnostic.is_primary {
2034 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2035 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2036 source,
2037 diagnostic.code.clone(),
2038 entry.range.clone(),
2039 )) {
2040 if let Some(severity) = severity {
2041 diagnostic.severity = severity;
2042 }
2043 diagnostic.is_unnecessary = is_unnecessary;
2044 }
2045 }
2046 }
2047
2048 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2049 Ok(())
2050 }
2051
2052 pub fn update_diagnostic_entries(
2053 &mut self,
2054 abs_path: PathBuf,
2055 version: Option<i32>,
2056 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2057 cx: &mut ModelContext<Project>,
2058 ) -> Result<(), anyhow::Error> {
2059 let (worktree, relative_path) = self
2060 .find_local_worktree(&abs_path, cx)
2061 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2062 if !worktree.read(cx).is_visible() {
2063 return Ok(());
2064 }
2065
2066 let project_path = ProjectPath {
2067 worktree_id: worktree.read(cx).id(),
2068 path: relative_path.into(),
2069 };
2070 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2071 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2072 }
2073
2074 let updated = worktree.update(cx, |worktree, cx| {
2075 worktree
2076 .as_local_mut()
2077 .ok_or_else(|| anyhow!("not a local worktree"))?
2078 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2079 })?;
2080 if updated {
2081 cx.emit(Event::DiagnosticsUpdated(project_path));
2082 }
2083 Ok(())
2084 }
2085
    /// Applies a freshly-received diagnostic list to an open buffer.
    ///
    /// `version`, when present, identifies the buffer snapshot the
    /// diagnostics were computed against; ranges are resolved relative to
    /// that snapshot. Diagnostics marked disk-based are additionally
    /// remapped through any unsaved edits, since they describe the file on
    /// disk rather than the buffer's current contents.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-breaker for diagnostics that share a range: primaries first,
        // then disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, then end descending, so enclosing ranges
        // precede the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clip to valid buffer positions in case the server's coordinates
            // are stale or out of bounds.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2154
    /// Reloads the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty buffers backed by files are considered. Local files are
    /// reloaded directly; files hosted by a remote collaborator are reloaded
    /// by the host via RPC. When `push_to_history` is false, the resulting
    /// transactions are removed from each buffer's undo history. Returns the
    /// combined transaction covering every reloaded buffer.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the dirty buffers into locally-backed and remote-backed.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote buffers can only be reloaded while connected to the host.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to reload the remote buffers and merge in the
            // deserialized transaction it returns.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Reload local buffers one at a time, collecting each reload's
            // transaction into the combined result.
            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2218
2219 pub fn format(
2220 &self,
2221 buffers: HashSet<ModelHandle<Buffer>>,
2222 push_to_history: bool,
2223 cx: &mut ModelContext<Project>,
2224 ) -> Task<Result<ProjectTransaction>> {
2225 let mut local_buffers = Vec::new();
2226 let mut remote_buffers = None;
2227 for buffer_handle in buffers {
2228 let buffer = buffer_handle.read(cx);
2229 if let Some(file) = File::from_dyn(buffer.file()) {
2230 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2231 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2232 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2233 }
2234 } else {
2235 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2236 }
2237 } else {
2238 return Task::ready(Ok(Default::default()));
2239 }
2240 }
2241
2242 let remote_buffers = self.remote_id().zip(remote_buffers);
2243 let client = self.client.clone();
2244
2245 cx.spawn(|this, mut cx| async move {
2246 let mut project_transaction = ProjectTransaction::default();
2247
2248 if let Some((project_id, remote_buffers)) = remote_buffers {
2249 let response = client
2250 .request(proto::FormatBuffers {
2251 project_id,
2252 buffer_ids: remote_buffers
2253 .iter()
2254 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2255 .collect(),
2256 })
2257 .await?
2258 .transaction
2259 .ok_or_else(|| anyhow!("missing transaction"))?;
2260 project_transaction = this
2261 .update(&mut cx, |this, cx| {
2262 this.deserialize_project_transaction(response, push_to_history, cx)
2263 })
2264 .await?;
2265 }
2266
2267 for (buffer, buffer_abs_path, language_server) in local_buffers {
2268 let text_document = lsp::TextDocumentIdentifier::new(
2269 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2270 );
2271 let capabilities = &language_server.capabilities();
2272 let tab_size = cx.update(|cx| {
2273 let language_name = buffer.read(cx).language().map(|language| language.name());
2274 cx.global::<Settings>().tab_size(language_name.as_deref())
2275 });
2276 let lsp_edits = if capabilities
2277 .document_formatting_provider
2278 .as_ref()
2279 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2280 {
2281 language_server
2282 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2283 text_document,
2284 options: lsp::FormattingOptions {
2285 tab_size,
2286 insert_spaces: true,
2287 insert_final_newline: Some(true),
2288 ..Default::default()
2289 },
2290 work_done_progress_params: Default::default(),
2291 })
2292 .await?
2293 } else if capabilities
2294 .document_range_formatting_provider
2295 .as_ref()
2296 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2297 {
2298 let buffer_start = lsp::Position::new(0, 0);
2299 let buffer_end =
2300 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2301 language_server
2302 .request::<lsp::request::RangeFormatting>(
2303 lsp::DocumentRangeFormattingParams {
2304 text_document,
2305 range: lsp::Range::new(buffer_start, buffer_end),
2306 options: lsp::FormattingOptions {
2307 tab_size: 4,
2308 insert_spaces: true,
2309 insert_final_newline: Some(true),
2310 ..Default::default()
2311 },
2312 work_done_progress_params: Default::default(),
2313 },
2314 )
2315 .await?
2316 } else {
2317 continue;
2318 };
2319
2320 if let Some(lsp_edits) = lsp_edits {
2321 let edits = this
2322 .update(&mut cx, |this, cx| {
2323 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2324 })
2325 .await?;
2326 buffer.update(&mut cx, |buffer, cx| {
2327 buffer.finalize_last_transaction();
2328 buffer.start_transaction();
2329 for (range, text) in edits {
2330 buffer.edit([(range, text)], cx);
2331 }
2332 if buffer.end_transaction(cx).is_some() {
2333 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2334 if !push_to_history {
2335 buffer.forget_transaction(transaction.id);
2336 }
2337 project_transaction.0.insert(cx.handle(), transaction);
2338 }
2339 });
2340 }
2341 }
2342
2343 Ok(project_transaction)
2344 })
2345 }
2346
2347 pub fn definition<T: ToPointUtf16>(
2348 &self,
2349 buffer: &ModelHandle<Buffer>,
2350 position: T,
2351 cx: &mut ModelContext<Self>,
2352 ) -> Task<Result<Vec<Location>>> {
2353 let position = position.to_point_utf16(buffer.read(cx));
2354 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2355 }
2356
2357 pub fn references<T: ToPointUtf16>(
2358 &self,
2359 buffer: &ModelHandle<Buffer>,
2360 position: T,
2361 cx: &mut ModelContext<Self>,
2362 ) -> Task<Result<Vec<Location>>> {
2363 let position = position.to_point_utf16(buffer.read(cx));
2364 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2365 }
2366
2367 pub fn document_highlights<T: ToPointUtf16>(
2368 &self,
2369 buffer: &ModelHandle<Buffer>,
2370 position: T,
2371 cx: &mut ModelContext<Self>,
2372 ) -> Task<Result<Vec<DocumentHighlight>>> {
2373 let position = position.to_point_utf16(buffer.read(cx));
2374
2375 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2376 }
2377
    /// Searches all running language servers for workspace symbols matching
    /// `query`.
    ///
    /// Locally, every language server is queried concurrently and the raw LSP
    /// symbols are converted to [`Symbol`]s, resolving each result's path
    /// against the project's worktrees. For remote projects the query is
    /// forwarded to the host over RPC. Returns an empty list when the project
    /// is neither local nor connected.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Issue one workspace/symbol request per (worktree, server) pair.
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            // Errors are logged and treated as empty responses
                            // so one failing server doesn't sink the search.
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    // Project was dropped while requests were in flight.
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer resolving the symbol's path against one of
                            // our own worktrees; otherwise keep it relative to
                            // the worktree whose server reported it.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            // Build a syntax-highlighted label when the symbol's
                            // language is known; otherwise fall back to plain text.
                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the query to the host.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and dropped.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2480
2481 pub fn open_buffer_for_symbol(
2482 &mut self,
2483 symbol: &Symbol,
2484 cx: &mut ModelContext<Self>,
2485 ) -> Task<Result<ModelHandle<Buffer>>> {
2486 if self.is_local() {
2487 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2488 symbol.source_worktree_id,
2489 symbol.language_server_name.clone(),
2490 )) {
2491 server.clone()
2492 } else {
2493 return Task::ready(Err(anyhow!(
2494 "language server for worktree and language not found"
2495 )));
2496 };
2497
2498 let worktree_abs_path = if let Some(worktree_abs_path) = self
2499 .worktree_for_id(symbol.worktree_id, cx)
2500 .and_then(|worktree| worktree.read(cx).as_local())
2501 .map(|local_worktree| local_worktree.abs_path())
2502 {
2503 worktree_abs_path
2504 } else {
2505 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2506 };
2507 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2508 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2509 uri
2510 } else {
2511 return Task::ready(Err(anyhow!("invalid symbol path")));
2512 };
2513
2514 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2515 } else if let Some(project_id) = self.remote_id() {
2516 let request = self.client.request(proto::OpenBufferForSymbol {
2517 project_id,
2518 symbol: Some(serialize_symbol(symbol)),
2519 });
2520 cx.spawn(|this, mut cx| async move {
2521 let response = request.await?;
2522 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2523 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2524 .await
2525 })
2526 } else {
2527 Task::ready(Err(anyhow!("project does not have a remote id")))
2528 }
2529 }
2530
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Locally, the buffer's language server is queried directly and each
    /// completion's replacement range is validated (or inferred from the word
    /// under the cursor when the server supplies none). For remote projects the
    /// request is forwarded to the host over RPC. Returns an empty list when
    /// the buffer has no file or no language server.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers without a file have no completion source.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor used by the remote path so the host can resolve the position
        // even if the buffer has changed in the meantime.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local worktree implies a local file, so the path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP response may be a bare array or a (possibly
                // incomplete) list; normalize to a Vec of items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily-computed word range shared by all completions that
                    // arrive without an explicit text edit.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                // Insert/replace edits are not supported; drop them.
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                // Prefer a language-specific label; fall back to
                                // the raw LSP label text.
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host, then wait until our copy of the
            // buffer has caught up to the version the host responded against.
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2688
    /// Applies the additional text edits attached to an accepted completion
    /// (e.g. auto-inserted imports).
    ///
    /// Locally, the completion item is resolved via the language server's
    /// `completionItem/resolve` request and any `additional_text_edits` are
    /// applied as a single transaction. For remote projects the request is
    /// forwarded to the host over RPC. When `push_to_history` is false, the
    /// transaction is removed from the buffer's undo history. Returns the
    /// applied transaction, or `None` if there were no additional edits.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Resolve the item first: servers often omit the additional
                // edits from the initial completion response.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group the edits into one transaction so they undo as
                        // a single unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // All edits were no-ops; nothing to record.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the edits and sends back the
            // transaction, which we wait to observe locally before returning.
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2770
2771 pub fn code_actions<T: Clone + ToOffset>(
2772 &self,
2773 buffer_handle: &ModelHandle<Buffer>,
2774 range: Range<T>,
2775 cx: &mut ModelContext<Self>,
2776 ) -> Task<Result<Vec<CodeAction>>> {
2777 let buffer_handle = buffer_handle.clone();
2778 let buffer = buffer_handle.read(cx);
2779 let snapshot = buffer.snapshot();
2780 let relevant_diagnostics = snapshot
2781 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2782 .map(|entry| entry.to_lsp_diagnostic_stub())
2783 .collect();
2784 let buffer_id = buffer.remote_id();
2785 let worktree;
2786 let buffer_abs_path;
2787 if let Some(file) = File::from_dyn(buffer.file()) {
2788 worktree = file.worktree.clone();
2789 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2790 } else {
2791 return Task::ready(Ok(Default::default()));
2792 };
2793 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2794
2795 if worktree.read(cx).as_local().is_some() {
2796 let buffer_abs_path = buffer_abs_path.unwrap();
2797 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2798 {
2799 server.clone()
2800 } else {
2801 return Task::ready(Ok(Default::default()));
2802 };
2803
2804 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2805 cx.foreground().spawn(async move {
2806 if !lang_server.capabilities().code_action_provider.is_some() {
2807 return Ok(Default::default());
2808 }
2809
2810 Ok(lang_server
2811 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2812 text_document: lsp::TextDocumentIdentifier::new(
2813 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2814 ),
2815 range: lsp_range,
2816 work_done_progress_params: Default::default(),
2817 partial_result_params: Default::default(),
2818 context: lsp::CodeActionContext {
2819 diagnostics: relevant_diagnostics,
2820 only: Some(vec![
2821 lsp::CodeActionKind::QUICKFIX,
2822 lsp::CodeActionKind::REFACTOR,
2823 lsp::CodeActionKind::REFACTOR_EXTRACT,
2824 lsp::CodeActionKind::SOURCE,
2825 ]),
2826 },
2827 })
2828 .await?
2829 .unwrap_or_default()
2830 .into_iter()
2831 .filter_map(|entry| {
2832 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2833 Some(CodeAction {
2834 range: range.clone(),
2835 lsp_action,
2836 })
2837 } else {
2838 None
2839 }
2840 })
2841 .collect())
2842 })
2843 } else if let Some(project_id) = self.remote_id() {
2844 let rpc = self.client.clone();
2845 let version = buffer.version();
2846 cx.spawn_weak(|_, mut cx| async move {
2847 let response = rpc
2848 .request(proto::GetCodeActions {
2849 project_id,
2850 buffer_id,
2851 start: Some(language::proto::serialize_anchor(&range.start)),
2852 end: Some(language::proto::serialize_anchor(&range.end)),
2853 version: serialize_version(&version),
2854 })
2855 .await?;
2856
2857 buffer_handle
2858 .update(&mut cx, |buffer, _| {
2859 buffer.wait_for_version(deserialize_version(response.version))
2860 })
2861 .await;
2862
2863 response
2864 .actions
2865 .into_iter()
2866 .map(language::proto::deserialize_code_action)
2867 .collect()
2868 })
2869 } else {
2870 Task::ready(Ok(Default::default()))
2871 }
2872 }
2873
    /// Applies a previously-fetched code action to the project.
    ///
    /// Locally, the action is first resolved (its embedded range refreshed, or
    /// the action re-fetched if it carries no resolve data), then either its
    /// workspace edit is applied or its command is executed on the language
    /// server. For remote projects the action is forwarded to the host over
    /// RPC. Returns the resulting [`ProjectTransaction`].
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports `codeAction/resolve`: patch its
                    // stored range to the buffer's current coordinates and ask
                    // the server to resolve it.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for this range
                    // and pick the one with the matching title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Executing the command may trigger workspace edits that
                    // arrive asynchronously; clear any stale record first, then
                    // collect whatever the command produced.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    // Action carries neither an edit nor a command: nothing to do.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the action and returns the
            // serialized transaction.
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2969
2970 async fn deserialize_workspace_edit(
2971 this: ModelHandle<Self>,
2972 edit: lsp::WorkspaceEdit,
2973 push_to_history: bool,
2974 lsp_adapter: Arc<dyn LspAdapter>,
2975 language_server: Arc<LanguageServer>,
2976 cx: &mut AsyncAppContext,
2977 ) -> Result<ProjectTransaction> {
2978 let fs = this.read_with(cx, |this, _| this.fs.clone());
2979 let mut operations = Vec::new();
2980 if let Some(document_changes) = edit.document_changes {
2981 match document_changes {
2982 lsp::DocumentChanges::Edits(edits) => {
2983 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2984 }
2985 lsp::DocumentChanges::Operations(ops) => operations = ops,
2986 }
2987 } else if let Some(changes) = edit.changes {
2988 operations.extend(changes.into_iter().map(|(uri, edits)| {
2989 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2990 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2991 uri,
2992 version: None,
2993 },
2994 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2995 })
2996 }));
2997 }
2998
2999 let mut project_transaction = ProjectTransaction::default();
3000 for operation in operations {
3001 match operation {
3002 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3003 let abs_path = op
3004 .uri
3005 .to_file_path()
3006 .map_err(|_| anyhow!("can't convert URI to path"))?;
3007
3008 if let Some(parent_path) = abs_path.parent() {
3009 fs.create_dir(parent_path).await?;
3010 }
3011 if abs_path.ends_with("/") {
3012 fs.create_dir(&abs_path).await?;
3013 } else {
3014 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3015 .await?;
3016 }
3017 }
3018 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3019 let source_abs_path = op
3020 .old_uri
3021 .to_file_path()
3022 .map_err(|_| anyhow!("can't convert URI to path"))?;
3023 let target_abs_path = op
3024 .new_uri
3025 .to_file_path()
3026 .map_err(|_| anyhow!("can't convert URI to path"))?;
3027 fs.rename(
3028 &source_abs_path,
3029 &target_abs_path,
3030 op.options.map(Into::into).unwrap_or_default(),
3031 )
3032 .await?;
3033 }
3034 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3035 let abs_path = op
3036 .uri
3037 .to_file_path()
3038 .map_err(|_| anyhow!("can't convert URI to path"))?;
3039 let options = op.options.map(Into::into).unwrap_or_default();
3040 if abs_path.ends_with("/") {
3041 fs.remove_dir(&abs_path, options).await?;
3042 } else {
3043 fs.remove_file(&abs_path, options).await?;
3044 }
3045 }
3046 lsp::DocumentChangeOperation::Edit(op) => {
3047 let buffer_to_edit = this
3048 .update(cx, |this, cx| {
3049 this.open_local_buffer_via_lsp(
3050 op.text_document.uri,
3051 lsp_adapter.clone(),
3052 language_server.clone(),
3053 cx,
3054 )
3055 })
3056 .await?;
3057
3058 let edits = this
3059 .update(cx, |this, cx| {
3060 let edits = op.edits.into_iter().map(|edit| match edit {
3061 lsp::OneOf::Left(edit) => edit,
3062 lsp::OneOf::Right(edit) => edit.text_edit,
3063 });
3064 this.edits_from_lsp(
3065 &buffer_to_edit,
3066 edits,
3067 op.text_document.version,
3068 cx,
3069 )
3070 })
3071 .await?;
3072
3073 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3074 buffer.finalize_last_transaction();
3075 buffer.start_transaction();
3076 for (range, text) in edits {
3077 buffer.edit([(range, text)], cx);
3078 }
3079 let transaction = if buffer.end_transaction(cx).is_some() {
3080 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3081 if !push_to_history {
3082 buffer.forget_transaction(transaction.id);
3083 }
3084 Some(transaction)
3085 } else {
3086 None
3087 };
3088
3089 transaction
3090 });
3091 if let Some(transaction) = transaction {
3092 project_transaction.0.insert(buffer_to_edit, transaction);
3093 }
3094 }
3095 }
3096 }
3097
3098 Ok(project_transaction)
3099 }
3100
3101 pub fn prepare_rename<T: ToPointUtf16>(
3102 &self,
3103 buffer: ModelHandle<Buffer>,
3104 position: T,
3105 cx: &mut ModelContext<Self>,
3106 ) -> Task<Result<Option<Range<Anchor>>>> {
3107 let position = position.to_point_utf16(buffer.read(cx));
3108 self.request_lsp(buffer, PrepareRename { position }, cx)
3109 }
3110
3111 pub fn perform_rename<T: ToPointUtf16>(
3112 &self,
3113 buffer: ModelHandle<Buffer>,
3114 position: T,
3115 new_name: String,
3116 push_to_history: bool,
3117 cx: &mut ModelContext<Self>,
3118 ) -> Task<Result<ProjectTransaction>> {
3119 let position = position.to_point_utf16(buffer.read(cx));
3120 self.request_lsp(
3121 buffer,
3122 PerformRename {
3123 position,
3124 new_name,
3125 push_to_history,
3126 },
3127 cx,
3128 )
3129 }
3130
3131 pub fn search(
3132 &self,
3133 query: SearchQuery,
3134 cx: &mut ModelContext<Self>,
3135 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3136 if self.is_local() {
3137 let snapshots = self
3138 .visible_worktrees(cx)
3139 .filter_map(|tree| {
3140 let tree = tree.read(cx).as_local()?;
3141 Some(tree.snapshot())
3142 })
3143 .collect::<Vec<_>>();
3144
3145 let background = cx.background().clone();
3146 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3147 if path_count == 0 {
3148 return Task::ready(Ok(Default::default()));
3149 }
3150 let workers = background.num_cpus().min(path_count);
3151 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3152 cx.background()
3153 .spawn({
3154 let fs = self.fs.clone();
3155 let background = cx.background().clone();
3156 let query = query.clone();
3157 async move {
3158 let fs = &fs;
3159 let query = &query;
3160 let matching_paths_tx = &matching_paths_tx;
3161 let paths_per_worker = (path_count + workers - 1) / workers;
3162 let snapshots = &snapshots;
3163 background
3164 .scoped(|scope| {
3165 for worker_ix in 0..workers {
3166 let worker_start_ix = worker_ix * paths_per_worker;
3167 let worker_end_ix = worker_start_ix + paths_per_worker;
3168 scope.spawn(async move {
3169 let mut snapshot_start_ix = 0;
3170 let mut abs_path = PathBuf::new();
3171 for snapshot in snapshots {
3172 let snapshot_end_ix =
3173 snapshot_start_ix + snapshot.visible_file_count();
3174 if worker_end_ix <= snapshot_start_ix {
3175 break;
3176 } else if worker_start_ix > snapshot_end_ix {
3177 snapshot_start_ix = snapshot_end_ix;
3178 continue;
3179 } else {
3180 let start_in_snapshot = worker_start_ix
3181 .saturating_sub(snapshot_start_ix);
3182 let end_in_snapshot =
3183 cmp::min(worker_end_ix, snapshot_end_ix)
3184 - snapshot_start_ix;
3185
3186 for entry in snapshot
3187 .files(false, start_in_snapshot)
3188 .take(end_in_snapshot - start_in_snapshot)
3189 {
3190 if matching_paths_tx.is_closed() {
3191 break;
3192 }
3193
3194 abs_path.clear();
3195 abs_path.push(&snapshot.abs_path());
3196 abs_path.push(&entry.path);
3197 let matches = if let Some(file) =
3198 fs.open_sync(&abs_path).await.log_err()
3199 {
3200 query.detect(file).unwrap_or(false)
3201 } else {
3202 false
3203 };
3204
3205 if matches {
3206 let project_path =
3207 (snapshot.id(), entry.path.clone());
3208 if matching_paths_tx
3209 .send(project_path)
3210 .await
3211 .is_err()
3212 {
3213 break;
3214 }
3215 }
3216 }
3217
3218 snapshot_start_ix = snapshot_end_ix;
3219 }
3220 }
3221 });
3222 }
3223 })
3224 .await;
3225 }
3226 })
3227 .detach();
3228
3229 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3230 let open_buffers = self
3231 .opened_buffers
3232 .values()
3233 .filter_map(|b| b.upgrade(cx))
3234 .collect::<HashSet<_>>();
3235 cx.spawn(|this, cx| async move {
3236 for buffer in &open_buffers {
3237 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3238 buffers_tx.send((buffer.clone(), snapshot)).await?;
3239 }
3240
3241 let open_buffers = Rc::new(RefCell::new(open_buffers));
3242 while let Some(project_path) = matching_paths_rx.next().await {
3243 if buffers_tx.is_closed() {
3244 break;
3245 }
3246
3247 let this = this.clone();
3248 let open_buffers = open_buffers.clone();
3249 let buffers_tx = buffers_tx.clone();
3250 cx.spawn(|mut cx| async move {
3251 if let Some(buffer) = this
3252 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3253 .await
3254 .log_err()
3255 {
3256 if open_buffers.borrow_mut().insert(buffer.clone()) {
3257 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3258 buffers_tx.send((buffer, snapshot)).await?;
3259 }
3260 }
3261
3262 Ok::<_, anyhow::Error>(())
3263 })
3264 .detach();
3265 }
3266
3267 Ok::<_, anyhow::Error>(())
3268 })
3269 .detach_and_log_err(cx);
3270
3271 let background = cx.background().clone();
3272 cx.background().spawn(async move {
3273 let query = &query;
3274 let mut matched_buffers = Vec::new();
3275 for _ in 0..workers {
3276 matched_buffers.push(HashMap::default());
3277 }
3278 background
3279 .scoped(|scope| {
3280 for worker_matched_buffers in matched_buffers.iter_mut() {
3281 let mut buffers_rx = buffers_rx.clone();
3282 scope.spawn(async move {
3283 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3284 let buffer_matches = query
3285 .search(snapshot.as_rope())
3286 .await
3287 .iter()
3288 .map(|range| {
3289 snapshot.anchor_before(range.start)
3290 ..snapshot.anchor_after(range.end)
3291 })
3292 .collect::<Vec<_>>();
3293 if !buffer_matches.is_empty() {
3294 worker_matched_buffers
3295 .insert(buffer.clone(), buffer_matches);
3296 }
3297 }
3298 });
3299 }
3300 })
3301 .await;
3302 Ok(matched_buffers.into_iter().flatten().collect())
3303 })
3304 } else if let Some(project_id) = self.remote_id() {
3305 let request = self.client.request(query.to_proto(project_id));
3306 cx.spawn(|this, mut cx| async move {
3307 let response = request.await?;
3308 let mut result = HashMap::default();
3309 for location in response.locations {
3310 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3311 let target_buffer = this
3312 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3313 .await?;
3314 let start = location
3315 .start
3316 .and_then(deserialize_anchor)
3317 .ok_or_else(|| anyhow!("missing target start"))?;
3318 let end = location
3319 .end
3320 .and_then(deserialize_anchor)
3321 .ok_or_else(|| anyhow!("missing target end"))?;
3322 result
3323 .entry(target_buffer)
3324 .or_insert(Vec::new())
3325 .push(start..end)
3326 }
3327 Ok(result)
3328 })
3329 } else {
3330 Task::ready(Ok(Default::default()))
3331 }
3332 }
3333
    /// Routes an LSP request for `buffer_handle` either to the buffer's local
    /// language server (local projects) or over RPC to the host (remote
    /// projects). Yields a default `R::Response` when no server is available
    /// or the server lacks the required capability.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            // Only buffers backed by a local file can be served by a local
            // language server.
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // If the server doesn't advertise support for this kind of
                    // request, short-circuit with a default response.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the request to the host over RPC.
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        // No language server for this buffer and not connected to a host.
        Task::ready(Ok(Default::default()))
    }
3376
3377 pub fn find_or_create_local_worktree(
3378 &mut self,
3379 abs_path: impl AsRef<Path>,
3380 visible: bool,
3381 cx: &mut ModelContext<Self>,
3382 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3383 let abs_path = abs_path.as_ref();
3384 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3385 Task::ready(Ok((tree.clone(), relative_path.into())))
3386 } else {
3387 let worktree = self.create_local_worktree(abs_path, visible, cx);
3388 cx.foreground()
3389 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3390 }
3391 }
3392
3393 pub fn find_local_worktree(
3394 &self,
3395 abs_path: &Path,
3396 cx: &AppContext,
3397 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3398 for tree in self.worktrees(cx) {
3399 if let Some(relative_path) = tree
3400 .read(cx)
3401 .as_local()
3402 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3403 {
3404 return Some((tree.clone(), relative_path.into()));
3405 }
3406 }
3407 None
3408 }
3409
3410 pub fn is_shared(&self) -> bool {
3411 match &self.client_state {
3412 ProjectClientState::Local { is_shared, .. } => *is_shared,
3413 ProjectClientState::Remote { .. } => false,
3414 }
3415 }
3416
    /// Spawns creation of a new local worktree rooted at `abs_path`.
    /// Concurrent requests for the same path share a single loading task via
    /// `loading_local_worktrees`. Once loaded, the worktree is added to the
    /// project and, if the project is connected to the server, shared or
    /// registered there as well.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Drop the loading entry before inspecting the result
                        // so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        // If the project is known to the server, make the new
                        // worktree known there too (fully shared vs. merely
                        // registered, depending on the project's state).
                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // The task is shared between all concurrent callers, so the
                    // error must be cloneable: wrap it in an `Arc`.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        // Each caller gets its own task that unwraps the shared result and
        // re-wraps the shared error as a fresh `anyhow` error.
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3483
3484 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3485 self.worktrees.retain(|worktree| {
3486 worktree
3487 .upgrade(cx)
3488 .map_or(false, |w| w.read(cx).id() != id)
3489 });
3490 cx.notify();
3491 }
3492
3493 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3494 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3495 if worktree.read(cx).is_local() {
3496 cx.subscribe(&worktree, |this, worktree, _, cx| {
3497 this.update_local_worktree_buffers(worktree, cx);
3498 })
3499 .detach();
3500 }
3501
3502 let push_strong_handle = {
3503 let worktree = worktree.read(cx);
3504 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3505 };
3506 if push_strong_handle {
3507 self.worktrees
3508 .push(WorktreeHandle::Strong(worktree.clone()));
3509 } else {
3510 cx.observe_release(&worktree, |this, _, cx| {
3511 this.worktrees
3512 .retain(|worktree| worktree.upgrade(cx).is_some());
3513 cx.notify();
3514 })
3515 .detach();
3516 self.worktrees
3517 .push(WorktreeHandle::Weak(worktree.downgrade()));
3518 }
3519 cx.notify();
3520 }
3521
    /// Reconciles every open buffer backed by `worktree_handle` with the
    /// worktree's latest snapshot: re-points buffers at renamed or
    /// re-created entries, keeps the old path (with no entry) for deleted
    /// files, notifies remote peers, and re-registers renamed buffers with
    /// their language server.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only consider buffers that live in this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve the buffer's file against the new snapshot:
                        // prefer lookup by entry id (tracks renames), then by
                        // the old path (entry re-created), otherwise keep the
                        // old path with no entry (file was deleted).
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // A changed absolute path means the buffer must be
                        // re-registered with its language server (see below).
                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Keep collaborators' view of the file in sync.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer was dropped; forget its entry.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // Renamed buffers must be handed to the language server under their
        // new path (and possibly a different language).
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3601
3602 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3603 let new_active_entry = entry.and_then(|project_path| {
3604 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3605 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3606 Some(entry.id)
3607 });
3608 if new_active_entry != self.active_entry {
3609 self.active_entry = new_active_entry;
3610 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3611 }
3612 }
3613
3614 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3615 self.language_server_statuses
3616 .values()
3617 .any(|status| status.pending_diagnostic_updates > 0)
3618 }
3619
3620 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3621 let mut summary = DiagnosticSummary::default();
3622 for (_, path_summary) in self.diagnostic_summaries(cx) {
3623 summary.error_count += path_summary.error_count;
3624 summary.warning_count += path_summary.warning_count;
3625 }
3626 summary
3627 }
3628
3629 pub fn diagnostic_summaries<'a>(
3630 &'a self,
3631 cx: &'a AppContext,
3632 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3633 self.worktrees(cx).flat_map(move |worktree| {
3634 let worktree = worktree.read(cx);
3635 let worktree_id = worktree.id();
3636 worktree
3637 .diagnostic_summaries()
3638 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3639 })
3640 }
3641
3642 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3643 if self
3644 .language_server_statuses
3645 .values()
3646 .map(|status| status.pending_diagnostic_updates)
3647 .sum::<isize>()
3648 == 1
3649 {
3650 cx.emit(Event::DiskBasedDiagnosticsStarted);
3651 }
3652 }
3653
3654 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3655 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3656 if self
3657 .language_server_statuses
3658 .values()
3659 .map(|status| status.pending_diagnostic_updates)
3660 .sum::<isize>()
3661 == 0
3662 {
3663 cx.emit(Event::DiskBasedDiagnosticsFinished);
3664 }
3665 }
3666
    /// The most recently activated project entry, if any; maintained by
    /// `set_active_path`.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3670
3671 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3672 self.worktree_for_id(path.worktree_id, cx)?
3673 .read(cx)
3674 .entry_for_path(&path.path)
3675 .map(|entry| entry.id)
3676 }
3677
3678 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3679 let worktree = self.worktree_for_entry(entry_id, cx)?;
3680 let worktree = worktree.read(cx);
3681 let worktree_id = worktree.id();
3682 let path = worktree.entry_for_id(entry_id)?.path.clone();
3683 Some(ProjectPath { worktree_id, path })
3684 }
3685
3686 // RPC message handlers
3687
3688 async fn handle_unshare_project(
3689 this: ModelHandle<Self>,
3690 _: TypedEnvelope<proto::UnshareProject>,
3691 _: Arc<Client>,
3692 mut cx: AsyncAppContext,
3693 ) -> Result<()> {
3694 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3695 Ok(())
3696 }
3697
3698 async fn handle_add_collaborator(
3699 this: ModelHandle<Self>,
3700 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3701 _: Arc<Client>,
3702 mut cx: AsyncAppContext,
3703 ) -> Result<()> {
3704 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3705 let collaborator = envelope
3706 .payload
3707 .collaborator
3708 .take()
3709 .ok_or_else(|| anyhow!("empty collaborator"))?;
3710
3711 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3712 this.update(&mut cx, |this, cx| {
3713 this.collaborators
3714 .insert(collaborator.peer_id, collaborator);
3715 cx.notify();
3716 });
3717
3718 Ok(())
3719 }
3720
3721 async fn handle_remove_collaborator(
3722 this: ModelHandle<Self>,
3723 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3724 _: Arc<Client>,
3725 mut cx: AsyncAppContext,
3726 ) -> Result<()> {
3727 this.update(&mut cx, |this, cx| {
3728 let peer_id = PeerId(envelope.payload.peer_id);
3729 let replica_id = this
3730 .collaborators
3731 .remove(&peer_id)
3732 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3733 .replica_id;
3734 for (_, buffer) in &this.opened_buffers {
3735 if let Some(buffer) = buffer.upgrade(cx) {
3736 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3737 }
3738 }
3739 cx.emit(Event::CollaboratorLeft(peer_id));
3740 cx.notify();
3741 Ok(())
3742 })
3743 }
3744
3745 async fn handle_register_worktree(
3746 this: ModelHandle<Self>,
3747 envelope: TypedEnvelope<proto::RegisterWorktree>,
3748 client: Arc<Client>,
3749 mut cx: AsyncAppContext,
3750 ) -> Result<()> {
3751 this.update(&mut cx, |this, cx| {
3752 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3753 let replica_id = this.replica_id();
3754 let worktree = proto::Worktree {
3755 id: envelope.payload.worktree_id,
3756 root_name: envelope.payload.root_name,
3757 entries: Default::default(),
3758 diagnostic_summaries: Default::default(),
3759 visible: envelope.payload.visible,
3760 };
3761 let (worktree, load_task) =
3762 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3763 this.add_worktree(&worktree, cx);
3764 load_task.detach();
3765 Ok(())
3766 })
3767 }
3768
3769 async fn handle_unregister_worktree(
3770 this: ModelHandle<Self>,
3771 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3772 _: Arc<Client>,
3773 mut cx: AsyncAppContext,
3774 ) -> Result<()> {
3775 this.update(&mut cx, |this, cx| {
3776 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3777 this.remove_worktree(worktree_id, cx);
3778 Ok(())
3779 })
3780 }
3781
3782 async fn handle_update_worktree(
3783 this: ModelHandle<Self>,
3784 envelope: TypedEnvelope<proto::UpdateWorktree>,
3785 _: Arc<Client>,
3786 mut cx: AsyncAppContext,
3787 ) -> Result<()> {
3788 this.update(&mut cx, |this, cx| {
3789 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3790 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3791 worktree.update(cx, |worktree, _| {
3792 let worktree = worktree.as_remote_mut().unwrap();
3793 worktree.update_from_remote(envelope)
3794 })?;
3795 }
3796 Ok(())
3797 })
3798 }
3799
3800 async fn handle_create_project_entry(
3801 this: ModelHandle<Self>,
3802 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3803 _: Arc<Client>,
3804 mut cx: AsyncAppContext,
3805 ) -> Result<proto::CreateProjectEntryResponse> {
3806 let entry = this
3807 .update(&mut cx, |this, cx| {
3808 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3809 let worktree = this
3810 .worktree_for_id(worktree_id, cx)
3811 .ok_or_else(|| anyhow!("worktree not found"))?;
3812 worktree.update(cx, |worktree, cx| {
3813 let worktree = worktree.as_local_mut().unwrap();
3814 if envelope.payload.is_directory {
3815 unimplemented!("can't yet create directories");
3816 } else {
3817 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3818 anyhow::Ok(worktree.write_file(path, Default::default(), cx))
3819 }
3820 })
3821 })?
3822 .await?;
3823 Ok(proto::CreateProjectEntryResponse {
3824 entry: Some((&entry).into()),
3825 })
3826 }
3827
3828 async fn handle_update_diagnostic_summary(
3829 this: ModelHandle<Self>,
3830 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3831 _: Arc<Client>,
3832 mut cx: AsyncAppContext,
3833 ) -> Result<()> {
3834 this.update(&mut cx, |this, cx| {
3835 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3836 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3837 if let Some(summary) = envelope.payload.summary {
3838 let project_path = ProjectPath {
3839 worktree_id,
3840 path: Path::new(&summary.path).into(),
3841 };
3842 worktree.update(cx, |worktree, _| {
3843 worktree
3844 .as_remote_mut()
3845 .unwrap()
3846 .update_diagnostic_summary(project_path.path.clone(), &summary);
3847 });
3848 cx.emit(Event::DiagnosticsUpdated(project_path));
3849 }
3850 }
3851 Ok(())
3852 })
3853 }
3854
3855 async fn handle_start_language_server(
3856 this: ModelHandle<Self>,
3857 envelope: TypedEnvelope<proto::StartLanguageServer>,
3858 _: Arc<Client>,
3859 mut cx: AsyncAppContext,
3860 ) -> Result<()> {
3861 let server = envelope
3862 .payload
3863 .server
3864 .ok_or_else(|| anyhow!("invalid server"))?;
3865 this.update(&mut cx, |this, cx| {
3866 this.language_server_statuses.insert(
3867 server.id as usize,
3868 LanguageServerStatus {
3869 name: server.name,
3870 pending_work: Default::default(),
3871 pending_diagnostic_updates: 0,
3872 },
3873 );
3874 cx.notify();
3875 });
3876 Ok(())
3877 }
3878
    /// RPC: mirrors language-server status updates from the host so this
    /// replica can display work progress and diagnostics activity.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            // A long-running server task started (e.g. indexing).
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            // Progress report for an in-flight server task.
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            // A server task finished.
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            // Disk-based diagnostics (e.g. cargo check) started/finished.
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3927
3928 async fn handle_update_buffer(
3929 this: ModelHandle<Self>,
3930 envelope: TypedEnvelope<proto::UpdateBuffer>,
3931 _: Arc<Client>,
3932 mut cx: AsyncAppContext,
3933 ) -> Result<()> {
3934 this.update(&mut cx, |this, cx| {
3935 let payload = envelope.payload.clone();
3936 let buffer_id = payload.buffer_id;
3937 let ops = payload
3938 .operations
3939 .into_iter()
3940 .map(|op| language::proto::deserialize_operation(op))
3941 .collect::<Result<Vec<_>, _>>()?;
3942 match this.opened_buffers.entry(buffer_id) {
3943 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3944 OpenBuffer::Strong(buffer) => {
3945 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3946 }
3947 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3948 OpenBuffer::Weak(_) => {}
3949 },
3950 hash_map::Entry::Vacant(e) => {
3951 e.insert(OpenBuffer::Loading(ops));
3952 }
3953 }
3954 Ok(())
3955 })
3956 }
3957
3958 async fn handle_update_buffer_file(
3959 this: ModelHandle<Self>,
3960 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3961 _: Arc<Client>,
3962 mut cx: AsyncAppContext,
3963 ) -> Result<()> {
3964 this.update(&mut cx, |this, cx| {
3965 let payload = envelope.payload.clone();
3966 let buffer_id = payload.buffer_id;
3967 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3968 let worktree = this
3969 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3970 .ok_or_else(|| anyhow!("no such worktree"))?;
3971 let file = File::from_proto(file, worktree.clone(), cx)?;
3972 let buffer = this
3973 .opened_buffers
3974 .get_mut(&buffer_id)
3975 .and_then(|b| b.upgrade(cx))
3976 .ok_or_else(|| anyhow!("no such buffer"))?;
3977 buffer.update(cx, |buffer, cx| {
3978 buffer.file_updated(Box::new(file), cx).detach();
3979 });
3980 Ok(())
3981 })
3982 }
3983
    /// RPC: a guest asked the host to save a buffer. Waits for this replica
    /// to catch up to the guest's buffer version before saving — so the save
    /// reflects at least the state the guest saw — then reports the saved
    /// version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until this replica has applied everything the
        // requesting guest had applied.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4016
    /// RPC: reloads the requested buffers from disk on behalf of a guest and
    /// returns the resulting transaction, serialized for that peer.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            // Resolve every requested buffer id up front; fail the whole
            // request if any of them is unknown.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4045
    /// RPC: formats the requested buffers on behalf of a guest and returns
    /// the resulting transaction, serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            // Resolve every requested buffer id up front; fail the whole
            // request if any of them is unknown.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4074
    /// RPC: computes completions at an anchor position on behalf of a guest.
    /// Waits until this replica has seen the guest's buffer version so the
    /// anchor is valid, and echoes back the version the completions were
    /// computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Ensure the anchor refers to text this replica has already applied.
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Report the version we actually computed against, which may be
        // newer than the requested one.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4109
    /// RPC: applies a completion's additional text edits (e.g. auto-import
    /// insertions) to a buffer on behalf of a guest, returning the resulting
    /// transaction if any edits were applied.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language informs how the completion is decoded.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4142
    /// RPC: computes code actions for an anchor range on behalf of a guest.
    /// Waits until this replica has seen the guest's buffer version so the
    /// anchors are valid, and echoes back the version the actions were
    /// computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Ensure the anchors refer to text this replica has already applied.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Report the version we actually computed against, which may be
        // newer than the requested one.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4185
    /// RPC: applies a code action to a buffer on behalf of a guest and
    /// returns the resulting project-wide transaction, serialized for that
    /// peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4216
    /// RPC: generic handler for LSP-backed requests forwarded by guests.
    /// Deserializes the request, runs it against the host's language server
    /// via `request_lsp`, and serializes the response for the sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // Deserialization may await (e.g. to sync buffer state), so it
        // happens outside of any model update.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before running the request so the response is
        // serialized against the state it was computed from.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4257
4258 async fn handle_get_project_symbols(
4259 this: ModelHandle<Self>,
4260 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4261 _: Arc<Client>,
4262 mut cx: AsyncAppContext,
4263 ) -> Result<proto::GetProjectSymbolsResponse> {
4264 let symbols = this
4265 .update(&mut cx, |this, cx| {
4266 this.symbols(&envelope.payload.query, cx)
4267 })
4268 .await?;
4269
4270 Ok(proto::GetProjectSymbolsResponse {
4271 symbols: symbols.iter().map(serialize_symbol).collect(),
4272 })
4273 }
4274
4275 async fn handle_search_project(
4276 this: ModelHandle<Self>,
4277 envelope: TypedEnvelope<proto::SearchProject>,
4278 _: Arc<Client>,
4279 mut cx: AsyncAppContext,
4280 ) -> Result<proto::SearchProjectResponse> {
4281 let peer_id = envelope.original_sender_id()?;
4282 let query = SearchQuery::from_proto(envelope.payload)?;
4283 let result = this
4284 .update(&mut cx, |this, cx| this.search(query, cx))
4285 .await?;
4286
4287 this.update(&mut cx, |this, cx| {
4288 let mut locations = Vec::new();
4289 for (buffer, ranges) in result {
4290 for range in ranges {
4291 let start = serialize_anchor(&range.start);
4292 let end = serialize_anchor(&range.end);
4293 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4294 locations.push(proto::Location {
4295 buffer: Some(buffer),
4296 start: Some(start),
4297 end: Some(end),
4298 });
4299 }
4300 }
4301 Ok(proto::SearchProjectResponse { locations })
4302 })
4303 }
4304
4305 async fn handle_open_buffer_for_symbol(
4306 this: ModelHandle<Self>,
4307 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4308 _: Arc<Client>,
4309 mut cx: AsyncAppContext,
4310 ) -> Result<proto::OpenBufferForSymbolResponse> {
4311 let peer_id = envelope.original_sender_id()?;
4312 let symbol = envelope
4313 .payload
4314 .symbol
4315 .ok_or_else(|| anyhow!("invalid symbol"))?;
4316 let symbol = this.read_with(&cx, |this, _| {
4317 let symbol = this.deserialize_symbol(symbol)?;
4318 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4319 if signature == symbol.signature {
4320 Ok(symbol)
4321 } else {
4322 Err(anyhow!("invalid symbol signature"))
4323 }
4324 })?;
4325 let buffer = this
4326 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4327 .await?;
4328
4329 Ok(proto::OpenBufferForSymbolResponse {
4330 buffer: Some(this.update(&mut cx, |this, cx| {
4331 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4332 })),
4333 })
4334 }
4335
4336 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4337 let mut hasher = Sha256::new();
4338 hasher.update(worktree_id.to_proto().to_be_bytes());
4339 hasher.update(path.to_string_lossy().as_bytes());
4340 hasher.update(self.nonce.to_be_bytes());
4341 hasher.finalize().as_slice().try_into().unwrap()
4342 }
4343
4344 async fn handle_open_buffer_by_id(
4345 this: ModelHandle<Self>,
4346 envelope: TypedEnvelope<proto::OpenBufferById>,
4347 _: Arc<Client>,
4348 mut cx: AsyncAppContext,
4349 ) -> Result<proto::OpenBufferResponse> {
4350 let peer_id = envelope.original_sender_id()?;
4351 let buffer = this
4352 .update(&mut cx, |this, cx| {
4353 this.open_buffer_by_id(envelope.payload.id, cx)
4354 })
4355 .await?;
4356 this.update(&mut cx, |this, cx| {
4357 Ok(proto::OpenBufferResponse {
4358 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4359 })
4360 })
4361 }
4362
4363 async fn handle_open_buffer_by_path(
4364 this: ModelHandle<Self>,
4365 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4366 _: Arc<Client>,
4367 mut cx: AsyncAppContext,
4368 ) -> Result<proto::OpenBufferResponse> {
4369 let peer_id = envelope.original_sender_id()?;
4370 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4371 let open_buffer = this.update(&mut cx, |this, cx| {
4372 this.open_buffer(
4373 ProjectPath {
4374 worktree_id,
4375 path: PathBuf::from(envelope.payload.path).into(),
4376 },
4377 cx,
4378 )
4379 });
4380
4381 let buffer = open_buffer.await?;
4382 this.update(&mut cx, |this, cx| {
4383 Ok(proto::OpenBufferResponse {
4384 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4385 })
4386 })
4387 }
4388
4389 fn serialize_project_transaction_for_peer(
4390 &mut self,
4391 project_transaction: ProjectTransaction,
4392 peer_id: PeerId,
4393 cx: &AppContext,
4394 ) -> proto::ProjectTransaction {
4395 let mut serialized_transaction = proto::ProjectTransaction {
4396 buffers: Default::default(),
4397 transactions: Default::default(),
4398 };
4399 for (buffer, transaction) in project_transaction.0 {
4400 serialized_transaction
4401 .buffers
4402 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4403 serialized_transaction
4404 .transactions
4405 .push(language::proto::serialize_transaction(&transaction));
4406 }
4407 serialized_transaction
4408 }
4409
    /// Reconstructs a `ProjectTransaction` received from a peer: deserializes
    /// each buffer/transaction pair, waits for the referenced edits to arrive
    /// locally, and optionally records the transactions in undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // Edits referenced by a transaction may still be in flight; block
            // until each buffer has seen them before exposing the transaction.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4443
4444 fn serialize_buffer_for_peer(
4445 &mut self,
4446 buffer: &ModelHandle<Buffer>,
4447 peer_id: PeerId,
4448 cx: &AppContext,
4449 ) -> proto::Buffer {
4450 let buffer_id = buffer.read(cx).remote_id();
4451 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4452 if shared_buffers.insert(buffer_id) {
4453 proto::Buffer {
4454 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4455 }
4456 } else {
4457 proto::Buffer {
4458 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4459 }
4460 }
4461 }
4462
    /// Reconstructs a buffer from its wire representation. An `Id` variant
    /// refers to a buffer we should already know about (waiting for it to be
    /// opened if necessary); a `State` variant carries the full buffer state
    /// and creates a new local replica.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // `opened_buffer` is a watch channel used to wake waiters whenever any
        // buffer finishes opening.
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer's state may arrive on a separate message;
                    // poll the opened-buffers map each time one opens.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Resolve the buffer's file against a local worktree, if
                    // the wire state included one.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Notify any tasks blocked in the `Variant::Id` loop above
                    // that a new buffer is now available.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4521
    /// Reconstructs a `Symbol` from its protobuf representation, computing its
    /// display label from the language inferred from the symbol's path.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting a peer-supplied integer into a symbol-kind
        // enum is undefined behavior if the value is not a valid discriminant.
        // A checked conversion (e.g. TryFrom/match) would be sound — confirm
        // the representation of `kind` and fix.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain, unstyled label when the language can't
            // produce one.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4551
4552 async fn handle_buffer_saved(
4553 this: ModelHandle<Self>,
4554 envelope: TypedEnvelope<proto::BufferSaved>,
4555 _: Arc<Client>,
4556 mut cx: AsyncAppContext,
4557 ) -> Result<()> {
4558 let version = deserialize_version(envelope.payload.version);
4559 let mtime = envelope
4560 .payload
4561 .mtime
4562 .ok_or_else(|| anyhow!("missing mtime"))?
4563 .into();
4564
4565 this.update(&mut cx, |this, cx| {
4566 let buffer = this
4567 .opened_buffers
4568 .get(&envelope.payload.buffer_id)
4569 .and_then(|buffer| buffer.upgrade(cx));
4570 if let Some(buffer) = buffer {
4571 buffer.update(cx, |buffer, cx| {
4572 buffer.did_save(version, mtime, None, cx);
4573 });
4574 }
4575 Ok(())
4576 })
4577 }
4578
4579 async fn handle_buffer_reloaded(
4580 this: ModelHandle<Self>,
4581 envelope: TypedEnvelope<proto::BufferReloaded>,
4582 _: Arc<Client>,
4583 mut cx: AsyncAppContext,
4584 ) -> Result<()> {
4585 let payload = envelope.payload.clone();
4586 let version = deserialize_version(payload.version);
4587 let mtime = payload
4588 .mtime
4589 .ok_or_else(|| anyhow!("missing mtime"))?
4590 .into();
4591 this.update(&mut cx, |this, cx| {
4592 let buffer = this
4593 .opened_buffers
4594 .get(&payload.buffer_id)
4595 .and_then(|buffer| buffer.upgrade(cx));
4596 if let Some(buffer) = buffer {
4597 buffer.update(cx, |buffer, cx| {
4598 buffer.did_reload(version, mtime, cx);
4599 });
4600 }
4601 Ok(())
4602 })
4603 }
4604
4605 pub fn match_paths<'a>(
4606 &self,
4607 query: &'a str,
4608 include_ignored: bool,
4609 smart_case: bool,
4610 max_results: usize,
4611 cancel_flag: &'a AtomicBool,
4612 cx: &AppContext,
4613 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4614 let worktrees = self
4615 .worktrees(cx)
4616 .filter(|worktree| worktree.read(cx).is_visible())
4617 .collect::<Vec<_>>();
4618 let include_root_name = worktrees.len() > 1;
4619 let candidate_sets = worktrees
4620 .into_iter()
4621 .map(|worktree| CandidateSet {
4622 snapshot: worktree.read(cx).snapshot(),
4623 include_ignored,
4624 include_root_name,
4625 })
4626 .collect::<Vec<_>>();
4627
4628 let background = cx.background().clone();
4629 async move {
4630 fuzzy::match_paths(
4631 candidate_sets.as_slice(),
4632 query,
4633 smart_case,
4634 max_results,
4635 cancel_flag,
4636 background,
4637 )
4638 .await
4639 }
4640 }
4641
    /// Translates a batch of LSP `TextEdit`s into anchor-based buffer edits,
    /// resolved against the buffer snapshot matching `version` (or the current
    /// text if `version` is `None`). Adjacent/newline-separated edits are
    /// merged, and multiline edits are diffed so anchors in unchanged regions
    /// are preserved.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Only merge across a gap when that gap is exactly one
                        // newline: the next edit must start at column 0 of the
                        // following row, and the current edit must end at the
                        // end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't lie on valid positions
                // in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether we've passed unchanged text since the last
                    // delete/insert, so consecutive changes fold into one edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit's deleted range.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    // Append to the previous edit's inserted text.
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: an empty range anchored after the point.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4744
4745 fn buffer_snapshot_for_lsp_version(
4746 &mut self,
4747 buffer: &ModelHandle<Buffer>,
4748 version: Option<i32>,
4749 cx: &AppContext,
4750 ) -> Result<TextBufferSnapshot> {
4751 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4752
4753 if let Some(version) = version {
4754 let buffer_id = buffer.read(cx).remote_id();
4755 let snapshots = self
4756 .buffer_snapshots
4757 .get_mut(&buffer_id)
4758 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4759 let mut found_snapshot = None;
4760 snapshots.retain(|(snapshot_version, snapshot)| {
4761 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4762 false
4763 } else {
4764 if *snapshot_version == version {
4765 found_snapshot = Some(snapshot.clone());
4766 }
4767 true
4768 }
4769 });
4770
4771 found_snapshot.ok_or_else(|| {
4772 anyhow!(
4773 "snapshot not found for buffer {} at version {}",
4774 buffer_id,
4775 version
4776 )
4777 })
4778 } else {
4779 Ok((buffer.read(cx)).text_snapshot())
4780 }
4781 }
4782
4783 fn language_server_for_buffer(
4784 &self,
4785 buffer: &Buffer,
4786 cx: &AppContext,
4787 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4788 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4789 let worktree_id = file.worktree_id(cx);
4790 self.language_servers
4791 .get(&(worktree_id, language.lsp_adapter()?.name()))
4792 } else {
4793 None
4794 }
4795 }
4796}
4797
4798impl WorktreeHandle {
4799 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4800 match self {
4801 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4802 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4803 }
4804 }
4805}
4806
4807impl OpenBuffer {
4808 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4809 match self {
4810 OpenBuffer::Strong(handle) => Some(handle.clone()),
4811 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4812 OpenBuffer::Loading(_) => None,
4813 }
4814 }
4815}
4816
/// The set of fuzzy-match candidates drawn from a single worktree snapshot.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are offered as candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name
    // (used to disambiguate when multiple worktrees are searched).
    include_root_name: bool,
}
4822
4823impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4824 type Candidates = CandidateSetIter<'a>;
4825
4826 fn id(&self) -> usize {
4827 self.snapshot.id().to_usize()
4828 }
4829
4830 fn len(&self) -> usize {
4831 if self.include_ignored {
4832 self.snapshot.file_count()
4833 } else {
4834 self.snapshot.visible_file_count()
4835 }
4836 }
4837
4838 fn prefix(&self) -> Arc<str> {
4839 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4840 self.snapshot.root_name().into()
4841 } else if self.include_root_name {
4842 format!("{}/", self.snapshot.root_name()).into()
4843 } else {
4844 "".into()
4845 }
4846 }
4847
4848 fn candidates(&'a self, start: usize) -> Self::Candidates {
4849 CandidateSetIter {
4850 traversal: self.snapshot.files(self.include_ignored, start),
4851 }
4852 }
4853}
4854
/// Iterator adapting a worktree file traversal into fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4858
4859impl<'a> Iterator for CandidateSetIter<'a> {
4860 type Item = PathMatchCandidate<'a>;
4861
4862 fn next(&mut self) -> Option<Self::Item> {
4863 self.traversal.next().map(|entry| {
4864 if let EntryKind::File(char_bag) = entry.kind {
4865 PathMatchCandidate {
4866 path: &entry.path,
4867 char_bag,
4868 }
4869 } else {
4870 unreachable!()
4871 }
4872 })
4873 }
4874}
4875
impl Entity for Project {
    type Event = Event;

    /// Notifies the server when the project model is dropped: local projects
    /// are unregistered, remote projects are left.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared (it has a
                // remote id).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Shuts down all running language servers before the application exits,
    /// so they can terminate cleanly.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, (_, server))| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4915
4916impl Collaborator {
4917 fn from_proto(
4918 message: proto::Collaborator,
4919 user_store: &ModelHandle<UserStore>,
4920 cx: &mut AsyncAppContext,
4921 ) -> impl Future<Output = Result<Self>> {
4922 let user = user_store.update(cx, |user_store, cx| {
4923 user_store.fetch_user(message.user_id, cx)
4924 });
4925
4926 async move {
4927 Ok(Self {
4928 peer_id: PeerId(message.peer_id),
4929 user: user.await?,
4930 replica_id: message.replica_id as ReplicaId,
4931 })
4932 }
4933 }
4934}
4935
/// Convenience conversion so call sites can write `(worktree_id, "a/b").into()`.
impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
    fn from((worktree_id, path): (WorktreeId, P)) -> Self {
        Self {
            worktree_id,
            path: path.as_ref().into(),
        }
    }
}
4944
/// Maps LSP workspace-edit file-creation options onto our fs options,
/// treating absent flags as `false`.
impl From<lsp::CreateFileOptions> for fs::CreateOptions {
    fn from(options: lsp::CreateFileOptions) -> Self {
        Self {
            overwrite: options.overwrite.unwrap_or(false),
            ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
        }
    }
}
4953
/// Maps LSP workspace-edit file-rename options onto our fs options,
/// treating absent flags as `false`.
impl From<lsp::RenameFileOptions> for fs::RenameOptions {
    fn from(options: lsp::RenameFileOptions) -> Self {
        Self {
            overwrite: options.overwrite.unwrap_or(false),
            ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
        }
    }
}
4962
/// Maps LSP workspace-edit file-deletion options onto our fs options,
/// treating absent flags as `false`.
impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
    fn from(options: lsp::DeleteFileOptions) -> Self {
        Self {
            recursive: options.recursive.unwrap_or(false),
            ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
        }
    }
}
4971
/// Converts a `Symbol` into its protobuf representation for the wire.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // NOTE(review): relies on the symbol-kind enum's in-memory
        // representation matching the proto integer type; a plain `as` cast or
        // explicit conversion would avoid `unsafe` here — confirm and replace.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4991
/// Computes `path` relative to `base`: shared leading components are dropped,
/// and every remaining component of `base` becomes a `..` step. For example,
/// `relativize_path("/a/b", "/a/c/d")` yields `../c/d`.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (path_iter.next(), base_iter.next()) {
            (None, None) => break,
            (Some(component), None) => {
                // Base is exhausted: append the rest of `path` verbatim.
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
            // Path is exhausted but base continues: climb up one level per
            // remaining base component.
            (None, _) => result.push(Component::ParentDir),
            // Common prefix: skip matching components until the first divergence.
            (Some(a), Some(b)) if result.is_empty() && a == b => {}
            (Some(a), Some(b)) if b == Component::CurDir => result.push(a),
            (Some(a), Some(_)) => {
                // First mismatch: climb out of the remaining base components,
                // then descend into the rest of `path`.
                result.push(Component::ParentDir);
                result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                result.push(a);
                result.extend(path_iter.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
5020
impl Item for Buffer {
    /// Resolves the buffer's file (if it is a project-local file) to its
    /// project entry id.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
5026
5027#[cfg(test)]
5028mod tests {
5029 use crate::worktree::WorktreeHandle;
5030
5031 use super::{Event, *};
5032 use fs::RealFs;
5033 use futures::{future, StreamExt};
5034 use gpui::test::subscribe;
5035 use language::{
5036 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5037 OffsetRangeExt, Point, ToPoint,
5038 };
5039 use lsp::Url;
5040 use serde_json::json;
5041 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5042 use unindent::Unindent as _;
5043 use util::{assert_set_eq, test::temp_tree};
5044
    /// Verifies that a worktree opened through symlinks is scanned correctly
    /// (symlinked directories share inodes with their targets) and that fuzzy
    /// path matching returns the expected files.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the project through a symlink to the root, and add a symlinked
        // directory inside the tree ("finnochio" -> "fennel").
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            // apple, date, endive, grape, and grape-via-symlink.
            assert_eq!(tree.file_count(), 5);
            // The symlinked file resolves to the same inode as its target.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5098
5099 #[gpui::test]
5100 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5101 cx.foreground().forbid_parking();
5102
5103 let mut rust_language = Language::new(
5104 LanguageConfig {
5105 name: "Rust".into(),
5106 path_suffixes: vec!["rs".to_string()],
5107 ..Default::default()
5108 },
5109 Some(tree_sitter_rust::language()),
5110 );
5111 let mut json_language = Language::new(
5112 LanguageConfig {
5113 name: "JSON".into(),
5114 path_suffixes: vec!["json".to_string()],
5115 ..Default::default()
5116 },
5117 None,
5118 );
5119 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5120 name: "the-rust-language-server",
5121 capabilities: lsp::ServerCapabilities {
5122 completion_provider: Some(lsp::CompletionOptions {
5123 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5124 ..Default::default()
5125 }),
5126 ..Default::default()
5127 },
5128 ..Default::default()
5129 });
5130 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5131 name: "the-json-language-server",
5132 capabilities: lsp::ServerCapabilities {
5133 completion_provider: Some(lsp::CompletionOptions {
5134 trigger_characters: Some(vec![":".to_string()]),
5135 ..Default::default()
5136 }),
5137 ..Default::default()
5138 },
5139 ..Default::default()
5140 });
5141
5142 let fs = FakeFs::new(cx.background());
5143 fs.insert_tree(
5144 "/the-root",
5145 json!({
5146 "test.rs": "const A: i32 = 1;",
5147 "test2.rs": "",
5148 "Cargo.toml": "a = 1",
5149 "package.json": "{\"a\": 1}",
5150 }),
5151 )
5152 .await;
5153
5154 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5155 project.update(cx, |project, _| {
5156 project.languages.add(Arc::new(rust_language));
5157 project.languages.add(Arc::new(json_language));
5158 });
5159
5160 // Open a buffer without an associated language server.
5161 let toml_buffer = project
5162 .update(cx, |project, cx| {
5163 project.open_local_buffer("/the-root/Cargo.toml", cx)
5164 })
5165 .await
5166 .unwrap();
5167
5168 // Open a buffer with an associated language server.
5169 let rust_buffer = project
5170 .update(cx, |project, cx| {
5171 project.open_local_buffer("/the-root/test.rs", cx)
5172 })
5173 .await
5174 .unwrap();
5175
5176 // A server is started up, and it is notified about Rust files.
5177 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5178 assert_eq!(
5179 fake_rust_server
5180 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5181 .await
5182 .text_document,
5183 lsp::TextDocumentItem {
5184 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5185 version: 0,
5186 text: "const A: i32 = 1;".to_string(),
5187 language_id: Default::default()
5188 }
5189 );
5190
5191 // The buffer is configured based on the language server's capabilities.
5192 rust_buffer.read_with(cx, |buffer, _| {
5193 assert_eq!(
5194 buffer.completion_triggers(),
5195 &[".".to_string(), "::".to_string()]
5196 );
5197 });
5198 toml_buffer.read_with(cx, |buffer, _| {
5199 assert!(buffer.completion_triggers().is_empty());
5200 });
5201
5202 // Edit a buffer. The changes are reported to the language server.
5203 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5204 assert_eq!(
5205 fake_rust_server
5206 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5207 .await
5208 .text_document,
5209 lsp::VersionedTextDocumentIdentifier::new(
5210 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5211 1
5212 )
5213 );
5214
5215 // Open a third buffer with a different associated language server.
5216 let json_buffer = project
5217 .update(cx, |project, cx| {
5218 project.open_local_buffer("/the-root/package.json", cx)
5219 })
5220 .await
5221 .unwrap();
5222
5223 // A json language server is started up and is only notified about the json buffer.
5224 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5225 assert_eq!(
5226 fake_json_server
5227 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5228 .await
5229 .text_document,
5230 lsp::TextDocumentItem {
5231 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5232 version: 0,
5233 text: "{\"a\": 1}".to_string(),
5234 language_id: Default::default()
5235 }
5236 );
5237
5238 // This buffer is configured based on the second language server's
5239 // capabilities.
5240 json_buffer.read_with(cx, |buffer, _| {
5241 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5242 });
5243
5244 // When opening another buffer whose language server is already running,
5245 // it is also configured based on the existing language server's capabilities.
5246 let rust_buffer2 = project
5247 .update(cx, |project, cx| {
5248 project.open_local_buffer("/the-root/test2.rs", cx)
5249 })
5250 .await
5251 .unwrap();
5252 rust_buffer2.read_with(cx, |buffer, _| {
5253 assert_eq!(
5254 buffer.completion_triggers(),
5255 &[".".to_string(), "::".to_string()]
5256 );
5257 });
5258
5259 // Changes are reported only to servers matching the buffer's language.
5260 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5261 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5262 assert_eq!(
5263 fake_rust_server
5264 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5265 .await
5266 .text_document,
5267 lsp::VersionedTextDocumentIdentifier::new(
5268 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5269 1
5270 )
5271 );
5272
5273 // Save notifications are reported to all servers.
5274 toml_buffer
5275 .update(cx, |buffer, cx| buffer.save(cx))
5276 .await
5277 .unwrap();
5278 assert_eq!(
5279 fake_rust_server
5280 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5281 .await
5282 .text_document,
5283 lsp::TextDocumentIdentifier::new(
5284 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5285 )
5286 );
5287 assert_eq!(
5288 fake_json_server
5289 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5290 .await
5291 .text_document,
5292 lsp::TextDocumentIdentifier::new(
5293 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5294 )
5295 );
5296
5297 // Renames are reported only to servers matching the buffer's language.
5298 fs.rename(
5299 Path::new("/the-root/test2.rs"),
5300 Path::new("/the-root/test3.rs"),
5301 Default::default(),
5302 )
5303 .await
5304 .unwrap();
5305 assert_eq!(
5306 fake_rust_server
5307 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5308 .await
5309 .text_document,
5310 lsp::TextDocumentIdentifier::new(
5311 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5312 ),
5313 );
5314 assert_eq!(
5315 fake_rust_server
5316 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5317 .await
5318 .text_document,
5319 lsp::TextDocumentItem {
5320 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5321 version: 0,
5322 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5323 language_id: Default::default()
5324 },
5325 );
5326
5327 rust_buffer2.update(cx, |buffer, cx| {
5328 buffer.update_diagnostics(
5329 DiagnosticSet::from_sorted_entries(
5330 vec![DiagnosticEntry {
5331 diagnostic: Default::default(),
5332 range: Anchor::MIN..Anchor::MAX,
5333 }],
5334 &buffer.snapshot(),
5335 ),
5336 cx,
5337 );
5338 assert_eq!(
5339 buffer
5340 .snapshot()
5341 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5342 .count(),
5343 1
5344 );
5345 });
5346
5347 // When the rename changes the extension of the file, the buffer gets closed on the old
5348 // language server and gets opened on the new one.
5349 fs.rename(
5350 Path::new("/the-root/test3.rs"),
5351 Path::new("/the-root/test3.json"),
5352 Default::default(),
5353 )
5354 .await
5355 .unwrap();
5356 assert_eq!(
5357 fake_rust_server
5358 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5359 .await
5360 .text_document,
5361 lsp::TextDocumentIdentifier::new(
5362 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5363 ),
5364 );
5365 assert_eq!(
5366 fake_json_server
5367 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5368 .await
5369 .text_document,
5370 lsp::TextDocumentItem {
5371 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5372 version: 0,
5373 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5374 language_id: Default::default()
5375 },
5376 );
5377
5378 // We clear the diagnostics, since the language has changed.
5379 rust_buffer2.read_with(cx, |buffer, _| {
5380 assert_eq!(
5381 buffer
5382 .snapshot()
5383 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5384 .count(),
5385 0
5386 );
5387 });
5388
5389 // The renamed file's version resets after changing language server.
5390 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5391 assert_eq!(
5392 fake_json_server
5393 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5394 .await
5395 .text_document,
5396 lsp::VersionedTextDocumentIdentifier::new(
5397 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5398 1
5399 )
5400 );
5401
5402 // Restart language servers
5403 project.update(cx, |project, cx| {
5404 project.restart_language_servers_for_buffers(
5405 vec![rust_buffer.clone(), json_buffer.clone()],
5406 cx,
5407 );
5408 });
5409
5410 let mut rust_shutdown_requests = fake_rust_server
5411 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5412 let mut json_shutdown_requests = fake_json_server
5413 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5414 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5415
5416 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5417 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5418
5419 // Ensure rust document is reopened in new rust language server
5420 assert_eq!(
5421 fake_rust_server
5422 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5423 .await
5424 .text_document,
5425 lsp::TextDocumentItem {
5426 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5427 version: 1,
5428 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5429 language_id: Default::default()
5430 }
5431 );
5432
5433 // Ensure json documents are reopened in new json language server
5434 assert_set_eq!(
5435 [
5436 fake_json_server
5437 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5438 .await
5439 .text_document,
5440 fake_json_server
5441 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5442 .await
5443 .text_document,
5444 ],
5445 [
5446 lsp::TextDocumentItem {
5447 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5448 version: 0,
5449 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5450 language_id: Default::default()
5451 },
5452 lsp::TextDocumentItem {
5453 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5454 version: 1,
5455 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5456 language_id: Default::default()
5457 }
5458 ]
5459 );
5460
5461 // Close notifications are reported only to servers matching the buffer's language.
5462 cx.update(|_| drop(json_buffer));
5463 let close_message = lsp::DidCloseTextDocumentParams {
5464 text_document: lsp::TextDocumentIdentifier::new(
5465 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5466 ),
5467 };
5468 assert_eq!(
5469 fake_json_server
5470 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5471 .await,
5472 close_message,
5473 );
5474 }
5475
5476 #[gpui::test]
5477 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5478 cx.foreground().forbid_parking();
5479
5480 let fs = FakeFs::new(cx.background());
5481 fs.insert_tree(
5482 "/dir",
5483 json!({
5484 "a.rs": "let a = 1;",
5485 "b.rs": "let b = 2;"
5486 }),
5487 )
5488 .await;
5489
5490 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5491
5492 let buffer_a = project
5493 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5494 .await
5495 .unwrap();
5496 let buffer_b = project
5497 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5498 .await
5499 .unwrap();
5500
5501 project.update(cx, |project, cx| {
5502 project
5503 .update_diagnostics(
5504 lsp::PublishDiagnosticsParams {
5505 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5506 version: None,
5507 diagnostics: vec![lsp::Diagnostic {
5508 range: lsp::Range::new(
5509 lsp::Position::new(0, 4),
5510 lsp::Position::new(0, 5),
5511 ),
5512 severity: Some(lsp::DiagnosticSeverity::ERROR),
5513 message: "error 1".to_string(),
5514 ..Default::default()
5515 }],
5516 },
5517 &[],
5518 cx,
5519 )
5520 .unwrap();
5521 project
5522 .update_diagnostics(
5523 lsp::PublishDiagnosticsParams {
5524 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5525 version: None,
5526 diagnostics: vec![lsp::Diagnostic {
5527 range: lsp::Range::new(
5528 lsp::Position::new(0, 4),
5529 lsp::Position::new(0, 5),
5530 ),
5531 severity: Some(lsp::DiagnosticSeverity::WARNING),
5532 message: "error 2".to_string(),
5533 ..Default::default()
5534 }],
5535 },
5536 &[],
5537 cx,
5538 )
5539 .unwrap();
5540 });
5541
5542 buffer_a.read_with(cx, |buffer, _| {
5543 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5544 assert_eq!(
5545 chunks
5546 .iter()
5547 .map(|(s, d)| (s.as_str(), *d))
5548 .collect::<Vec<_>>(),
5549 &[
5550 ("let ", None),
5551 ("a", Some(DiagnosticSeverity::ERROR)),
5552 (" = 1;", None),
5553 ]
5554 );
5555 });
5556 buffer_b.read_with(cx, |buffer, _| {
5557 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5558 assert_eq!(
5559 chunks
5560 .iter()
5561 .map(|(s, d)| (s.as_str(), *d))
5562 .collect::<Vec<_>>(),
5563 &[
5564 ("let ", None),
5565 ("b", Some(DiagnosticSeverity::WARNING)),
5566 (" = 2;", None),
5567 ]
5568 );
5569 });
5570 }
5571
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        // Verifies that LSP progress notifications carrying the adapter's
        // disk-based-diagnostics token are translated into project events:
        // one `DiskBasedDiagnosticsStarted`/`Finished` pair per outermost
        // progress span, with `DiagnosticsUpdated` events in between.
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let worktree_id =
            project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested starts/ends of the same token should not emit additional
        // `DiskBasedDiagnosticsStarted` events — the next event asserted
        // below is a `DiagnosticsUpdated`.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        // Diagnostics published while progress is in flight surface as
        // `DiagnosticsUpdated` for the affected path.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Closing out the remaining progress spans reports the disk-based
        // diagnostics pass as updated and then finished.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // A buffer opened after publication still sees the diagnostic.
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });

        // Ensure publishing empty diagnostics twice only results in one update event.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: Default::default(),
            },
        );
        cx.foreground().run_until_parked();
        // The second identical (empty) publish must not produce an event.
        assert_eq!(futures::poll!(events.next()), Poll::Pending);
    }
5701
    #[gpui::test]
    async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
        // Verifies that restarting a language server while its disk-based
        // diagnostics progress is still open doesn't leave the project stuck
        // in a "diagnostics running" state.
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            disk_based_diagnostics_progress_token: Some(progress_token),
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate diagnostics starting to update.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;

        // Restart the server before the diagnostics finish updating.
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers([buffer], cx);
        });
        let mut events = subscribe(&project, cx);

        // Simulate the newly started server sending more diagnostics.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // All diagnostics are considered done, despite the old server's diagnostic
        // task never completing.
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );
        project.read_with(cx, |project, _| {
            assert!(!project.is_running_disk_based_diagnostics());
        });
    }
5764
    #[gpui::test]
    async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
        // Verifies that diagnostics published against older document versions
        // are transformed through subsequent buffer edits, that overlapping
        // diagnostics are highlighted correctly, and that out-of-order
        // publishes are handled.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let text = "
            fn a() { A }
            fn b() { BB }
            fn c() { CCC }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        let open_notification = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await;

        // Edit the buffer, moving the content down
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
        let change_notification_1 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_1.text_document.version > open_notification.text_document.version
        );

        // Report some diagnostics for the initial version of the buffer
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        source: Some("disk".to_string()),
                        message: "undefined variable 'CCC'".to_string(),
                        ..Default::default()
                    },
                ],
            },
        );

        // The diagnostics have moved down since they were created.
        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Point::new(4, 9)..Point::new(4, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'CCC'".to_string(),
                            is_disk_based: true,
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        }
                    }
                ]
            );
            // Chunk iteration reflects the shifted diagnostic ranges.
            assert_eq!(
                chunks_with_diagnostics(buffer, 0..buffer.len()),
                [
                    ("\n\nfn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn b() { ".to_string(), None),
                    ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\n".to_string(), None),
                ]
            );
            // A sub-range query truncates chunks at the query boundaries.
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
                [
                    ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
                ]
            );
        });

        // Ensure overlapping diagnostics are highlighted correctly.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "unreachable statement".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
            // Where the error and warning overlap, the error severity wins
            // per this test's expectations; the warning covers the remainder.
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
                [
                    ("fn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
                [
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
        });

        // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
        // changes since the last save.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
            buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
            buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
        });
        let change_notification_2 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_2.text_document.version
                > change_notification_1.text_document.version
        );

        // Handle out-of-order diagnostics
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(change_notification_2.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        // The disk-based diagnostics end up translated through the unsaved
        // edits made above.
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 21)..Point::new(2, 22),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 14),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
        });
    }
6047
6048 #[gpui::test]
6049 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6050 cx.foreground().forbid_parking();
6051
6052 let text = concat!(
6053 "let one = ;\n", //
6054 "let two = \n",
6055 "let three = 3;\n",
6056 );
6057
6058 let fs = FakeFs::new(cx.background());
6059 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6060
6061 let project = Project::test(fs, ["/dir"], cx).await;
6062 let buffer = project
6063 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6064 .await
6065 .unwrap();
6066
6067 project.update(cx, |project, cx| {
6068 project
6069 .update_buffer_diagnostics(
6070 &buffer,
6071 vec![
6072 DiagnosticEntry {
6073 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6074 diagnostic: Diagnostic {
6075 severity: DiagnosticSeverity::ERROR,
6076 message: "syntax error 1".to_string(),
6077 ..Default::default()
6078 },
6079 },
6080 DiagnosticEntry {
6081 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6082 diagnostic: Diagnostic {
6083 severity: DiagnosticSeverity::ERROR,
6084 message: "syntax error 2".to_string(),
6085 ..Default::default()
6086 },
6087 },
6088 ],
6089 None,
6090 cx,
6091 )
6092 .unwrap();
6093 });
6094
6095 // An empty range is extended forward to include the following character.
6096 // At the end of a line, an empty range is extended backward to include
6097 // the preceding character.
6098 buffer.read_with(cx, |buffer, _| {
6099 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6100 assert_eq!(
6101 chunks
6102 .iter()
6103 .map(|(s, d)| (s.as_str(), *d))
6104 .collect::<Vec<_>>(),
6105 &[
6106 ("let one = ", None),
6107 (";", Some(DiagnosticSeverity::ERROR)),
6108 ("\nlet two =", None),
6109 (" ", Some(DiagnosticSeverity::ERROR)),
6110 ("\nlet three = 3;\n", None)
6111 ]
6112 );
6113 });
6114 }
6115
    #[gpui::test]
    async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
        // `edits_from_lsp` must interpret edits that a language server
        // computed against an older document version, adjusting their
        // positions through the buffer edits made since that version.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let text = "
            fn a() {
                f1();
            }
            fn b() {
                f2();
            }
            fn c() {
                f3();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Capture the document version the server will base its edits on.
        let mut fake_server = fake_servers.next().await.unwrap();
        let lsp_document_version = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .version;

        // Simulate editing the buffer after the language server computes some edits.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [(
                    Point::new(0, 0)..Point::new(0, 0),
                    "// above first function\n",
                )],
                cx,
            );
            buffer.edit(
                [(
                    Point::new(2, 0)..Point::new(2, 0),
                    "    // inside first function\n",
                )],
                cx,
            );
            buffer.edit(
                [(
                    Point::new(6, 4)..Point::new(6, 4),
                    "// inside second function ",
                )],
                cx,
            );

            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f1();
                    }
                    fn b() {
                        // inside second function f2();
                    }
                    fn c() {
                        f3();
                    }
                "
                .unindent()
            );
        });

        // The server's edits use coordinates from `lsp_document_version`,
        // which predates the user edits above.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    vec![
                        // replace body of first function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 0),
                                lsp::Position::new(3, 0),
                            ),
                            new_text: "
                                fn a() {
                                    f10();
                                }
                            "
                            .unindent(),
                        },
                        // edit inside second function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(4, 6),
                                lsp::Position::new(4, 6),
                            ),
                            new_text: "00".into(),
                        },
                        // edit inside third function via two distinct edits
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 5),
                            ),
                            new_text: "4000".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 6),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    Some(lsp_document_version),
                    cx,
                )
            })
            .await
            .unwrap();

        // Applying the translated edits must preserve the user's edits while
        // incorporating the server's changes.
        buffer.update(cx, |buffer, cx| {
            for (range, new_text) in edits {
                buffer.edit([(range, new_text)], cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f10();
                    }
                    fn b() {
                        // inside second function f200();
                    }
                    fn c() {
                        f4000();
                    }
                "
                .unindent()
            );
        });
    }
6281
    #[gpui::test]
    async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
        // `edits_from_lsp` should minimize a large, mostly-redundant diff
        // (like rust-analyzer sends for merge-imports) down to the small
        // actual change.
        cx.foreground().forbid_parking();

        let text = "
            use a::b;
            use a::c;

            fn f() {
                b();
                c();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
            .await
            .unwrap();

        // Simulate the language server sending us a small edit in the form of a very large diff.
        // Rust-analyzer does this when performing a merge-imports code action.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    [
                        // Replace the first use statement without editing the semicolon.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 8),
                            ),
                            new_text: "a::{b, c}".into(),
                        },
                        // Reinsert the remainder of the file between the semicolon and the final
                        // newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "\n\n".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "
                                fn f() {
                                    b();
                                    c();
                                }"
                            .unindent(),
                        },
                        // Delete everything after the first newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(1, 0),
                                lsp::Position::new(7, 0),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();

        buffer.update(cx, |buffer, cx| {
            let edits = edits
                .into_iter()
                .map(|(range, text)| {
                    (
                        range.start.to_point(&buffer)..range.end.to_point(&buffer),
                        text,
                    )
                })
                .collect::<Vec<_>>();

            // Despite the large diff sent by the server, only two minimal
            // edits should remain after diffing.
            assert_eq!(
                edits,
                [
                    (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                    (Point::new(1, 0)..Point::new(2, 0), "".into())
                ]
            );

            for (range, new_text) in edits {
                buffer.edit([(range, new_text)], cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    use a::{b, c};

                    fn f() {
                        b();
                        c();
                    }
                "
                .unindent()
            );
        });
    }
6400
6401 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6402 buffer: &Buffer,
6403 range: Range<T>,
6404 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6405 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6406 for chunk in buffer.snapshot().chunks(range, true) {
6407 if chunks.last().map_or(false, |prev_chunk| {
6408 prev_chunk.1 == chunk.diagnostic_severity
6409 }) {
6410 chunks.last_mut().unwrap().0.push_str(chunk.text);
6411 } else {
6412 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6413 }
6414 }
6415 chunks
6416 }
6417
6418 #[gpui::test]
6419 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6420 let dir = temp_tree(json!({
6421 "root": {
6422 "dir1": {},
6423 "dir2": {
6424 "dir3": {}
6425 }
6426 }
6427 }));
6428
6429 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6430 let cancel_flag = Default::default();
6431 let results = project
6432 .read_with(cx, |project, cx| {
6433 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6434 })
6435 .await;
6436
6437 assert!(results.is_empty());
6438 }
6439
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        // Go-to-definition where the target file lies outside the project:
        // the target should be opened in a new, invisible worktree, which is
        // released again once the last handle to the definition is dropped.
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        // Only b.rs is part of the project; a.rs is outside of it.
        let project = Project::test(fs, ["/dir/b.rs"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            // Respond with a location in the out-of-project file a.rs.
            Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                lsp::Location::new(
                    lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // The target file was added as a second, invisible worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // Dropping the definition (the only handle to the target buffer)
        // releases the invisible worktree again.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Helper: every worktree in the project as (absolute path, is_visible).
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
6534
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When a completion item carries no explicit edit range, the replaced
    // (old) range must be inferred from the word prefix under the cursor
    // ("fqn" here), and `insert_text` must win over `label` as the new text.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the buffer, then let the fake server
    // answer; `.next().await` waits until the handler has actually run.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The old range spans the 3-character word "fqn" preceding the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
6592
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Applying a code action that resolves to a command (and not edits) must
    // execute the command and capture the edits the server sends back via a
    // `workspace/applyEdit` request into the returned project transaction.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edits are undoable as a unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
6708
6709 #[gpui::test]
6710 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6711 let fs = FakeFs::new(cx.background());
6712 fs.insert_tree(
6713 "/dir",
6714 json!({
6715 "file1": "the old contents",
6716 }),
6717 )
6718 .await;
6719
6720 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6721 let buffer = project
6722 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6723 .await
6724 .unwrap();
6725 buffer
6726 .update(cx, |buffer, cx| {
6727 assert_eq!(buffer.text(), "the old contents");
6728 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6729 buffer.save(cx)
6730 })
6731 .await
6732 .unwrap();
6733
6734 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6735 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6736 }
6737
6738 #[gpui::test]
6739 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6740 let fs = FakeFs::new(cx.background());
6741 fs.insert_tree(
6742 "/dir",
6743 json!({
6744 "file1": "the old contents",
6745 }),
6746 )
6747 .await;
6748
6749 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6750 let buffer = project
6751 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6752 .await
6753 .unwrap();
6754 buffer
6755 .update(cx, |buffer, cx| {
6756 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6757 buffer.save(cx)
6758 })
6759 .await
6760 .unwrap();
6761
6762 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6763 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6764 }
6765
6766 #[gpui::test]
6767 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6768 let fs = FakeFs::new(cx.background());
6769 fs.insert_tree("/dir", json!({})).await;
6770
6771 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6772 let buffer = project.update(cx, |project, cx| {
6773 project.create_buffer("", None, cx).unwrap()
6774 });
6775 buffer.update(cx, |buffer, cx| {
6776 buffer.edit([(0..0, "abc")], cx);
6777 assert!(buffer.is_dirty());
6778 assert!(!buffer.has_conflict());
6779 });
6780 project
6781 .update(cx, |project, cx| {
6782 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6783 })
6784 .await
6785 .unwrap();
6786 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6787 buffer.read_with(cx, |buffer, cx| {
6788 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6789 assert!(!buffer.is_dirty());
6790 assert!(!buffer.has_conflict());
6791 });
6792
6793 let opened_buffer = project
6794 .update(cx, |project, cx| {
6795 project.open_local_buffer("/dir/file1", cx)
6796 })
6797 .await
6798 .unwrap();
6799 assert_eq!(opened_buffer, buffer);
6800 }
6801
6802 #[gpui::test(retries = 5)]
6803 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6804 let dir = temp_tree(json!({
6805 "a": {
6806 "file1": "",
6807 "file2": "",
6808 "file3": "",
6809 },
6810 "b": {
6811 "c": {
6812 "file4": "",
6813 "file5": "",
6814 }
6815 }
6816 }));
6817
6818 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6819 let rpc = project.read_with(cx, |p, _| p.client.clone());
6820
6821 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6822 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6823 async move { buffer.await.unwrap() }
6824 };
6825 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6826 project.read_with(cx, |project, cx| {
6827 let tree = project.worktrees(cx).next().unwrap();
6828 tree.read(cx)
6829 .entry_for_path(path)
6830 .expect(&format!("no entry for path {}", path))
6831 .id
6832 })
6833 };
6834
6835 let buffer2 = buffer_for_path("a/file2", cx).await;
6836 let buffer3 = buffer_for_path("a/file3", cx).await;
6837 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6838 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6839
6840 let file2_id = id_for_path("a/file2", &cx);
6841 let file3_id = id_for_path("a/file3", &cx);
6842 let file4_id = id_for_path("b/c/file4", &cx);
6843
6844 // Create a remote copy of this worktree.
6845 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6846 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6847 let (remote, load_task) = cx.update(|cx| {
6848 Worktree::remote(
6849 1,
6850 1,
6851 initial_snapshot.to_proto(&Default::default(), true),
6852 rpc.clone(),
6853 cx,
6854 )
6855 });
6856 // tree
6857 load_task.await;
6858
6859 cx.read(|cx| {
6860 assert!(!buffer2.read(cx).is_dirty());
6861 assert!(!buffer3.read(cx).is_dirty());
6862 assert!(!buffer4.read(cx).is_dirty());
6863 assert!(!buffer5.read(cx).is_dirty());
6864 });
6865
6866 // Rename and delete files and directories.
6867 tree.flush_fs_events(&cx).await;
6868 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6869 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6870 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6871 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6872 tree.flush_fs_events(&cx).await;
6873
6874 let expected_paths = vec![
6875 "a",
6876 "a/file1",
6877 "a/file2.new",
6878 "b",
6879 "d",
6880 "d/file3",
6881 "d/file4",
6882 ];
6883
6884 cx.read(|app| {
6885 assert_eq!(
6886 tree.read(app)
6887 .paths()
6888 .map(|p| p.to_str().unwrap())
6889 .collect::<Vec<_>>(),
6890 expected_paths
6891 );
6892
6893 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6894 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6895 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6896
6897 assert_eq!(
6898 buffer2.read(app).file().unwrap().path().as_ref(),
6899 Path::new("a/file2.new")
6900 );
6901 assert_eq!(
6902 buffer3.read(app).file().unwrap().path().as_ref(),
6903 Path::new("d/file3")
6904 );
6905 assert_eq!(
6906 buffer4.read(app).file().unwrap().path().as_ref(),
6907 Path::new("d/file4")
6908 );
6909 assert_eq!(
6910 buffer5.read(app).file().unwrap().path().as_ref(),
6911 Path::new("b/c/file5")
6912 );
6913
6914 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6915 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6916 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6917 assert!(buffer5.read(app).file().unwrap().is_deleted());
6918 });
6919
6920 // Update the remote worktree. Check that it becomes consistent with the
6921 // local worktree.
6922 remote.update(cx, |remote, cx| {
6923 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6924 &initial_snapshot,
6925 1,
6926 1,
6927 true,
6928 );
6929 remote
6930 .as_remote_mut()
6931 .unwrap()
6932 .snapshot
6933 .apply_remote_update(update_message)
6934 .unwrap();
6935
6936 assert_eq!(
6937 remote
6938 .paths()
6939 .map(|p| p.to_str().unwrap())
6940 .collect::<Vec<_>>(),
6941 expected_paths
6942 );
6943 });
6944 }
6945
6946 #[gpui::test]
6947 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6948 let fs = FakeFs::new(cx.background());
6949 fs.insert_tree(
6950 "/dir",
6951 json!({
6952 "a.txt": "a-contents",
6953 "b.txt": "b-contents",
6954 }),
6955 )
6956 .await;
6957
6958 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6959
6960 // Spawn multiple tasks to open paths, repeating some paths.
6961 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6962 (
6963 p.open_local_buffer("/dir/a.txt", cx),
6964 p.open_local_buffer("/dir/b.txt", cx),
6965 p.open_local_buffer("/dir/a.txt", cx),
6966 )
6967 });
6968
6969 let buffer_a_1 = buffer_a_1.await.unwrap();
6970 let buffer_a_2 = buffer_a_2.await.unwrap();
6971 let buffer_b = buffer_b.await.unwrap();
6972 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6973 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6974
6975 // There is only one buffer per path.
6976 let buffer_a_id = buffer_a_1.id();
6977 assert_eq!(buffer_a_2.id(), buffer_a_id);
6978
6979 // Open the same path again while it is still open.
6980 drop(buffer_a_1);
6981 let buffer_a_3 = project
6982 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6983 .await
6984 .unwrap();
6985
6986 // There's still only one buffer per path.
6987 assert_eq!(buffer_a_3.id(), buffer_a_id);
6988 }
6989
6990 #[gpui::test]
6991 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6992 let fs = FakeFs::new(cx.background());
6993 fs.insert_tree(
6994 "/dir",
6995 json!({
6996 "file1": "abc",
6997 "file2": "def",
6998 "file3": "ghi",
6999 }),
7000 )
7001 .await;
7002
7003 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7004
7005 let buffer1 = project
7006 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7007 .await
7008 .unwrap();
7009 let events = Rc::new(RefCell::new(Vec::new()));
7010
7011 // initially, the buffer isn't dirty.
7012 buffer1.update(cx, |buffer, cx| {
7013 cx.subscribe(&buffer1, {
7014 let events = events.clone();
7015 move |_, _, event, _| match event {
7016 BufferEvent::Operation(_) => {}
7017 _ => events.borrow_mut().push(event.clone()),
7018 }
7019 })
7020 .detach();
7021
7022 assert!(!buffer.is_dirty());
7023 assert!(events.borrow().is_empty());
7024
7025 buffer.edit([(1..2, "")], cx);
7026 });
7027
7028 // after the first edit, the buffer is dirty, and emits a dirtied event.
7029 buffer1.update(cx, |buffer, cx| {
7030 assert!(buffer.text() == "ac");
7031 assert!(buffer.is_dirty());
7032 assert_eq!(
7033 *events.borrow(),
7034 &[language::Event::Edited, language::Event::Dirtied]
7035 );
7036 events.borrow_mut().clear();
7037 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7038 });
7039
7040 // after saving, the buffer is not dirty, and emits a saved event.
7041 buffer1.update(cx, |buffer, cx| {
7042 assert!(!buffer.is_dirty());
7043 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7044 events.borrow_mut().clear();
7045
7046 buffer.edit([(1..1, "B")], cx);
7047 buffer.edit([(2..2, "D")], cx);
7048 });
7049
7050 // after editing again, the buffer is dirty, and emits another dirty event.
7051 buffer1.update(cx, |buffer, cx| {
7052 assert!(buffer.text() == "aBDc");
7053 assert!(buffer.is_dirty());
7054 assert_eq!(
7055 *events.borrow(),
7056 &[
7057 language::Event::Edited,
7058 language::Event::Dirtied,
7059 language::Event::Edited,
7060 ],
7061 );
7062 events.borrow_mut().clear();
7063
7064 // TODO - currently, after restoring the buffer to its
7065 // previously-saved state, the is still considered dirty.
7066 buffer.edit([(1..3, "")], cx);
7067 assert!(buffer.text() == "ac");
7068 assert!(buffer.is_dirty());
7069 });
7070
7071 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7072
7073 // When a file is deleted, the buffer is considered dirty.
7074 let events = Rc::new(RefCell::new(Vec::new()));
7075 let buffer2 = project
7076 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7077 .await
7078 .unwrap();
7079 buffer2.update(cx, |_, cx| {
7080 cx.subscribe(&buffer2, {
7081 let events = events.clone();
7082 move |_, _, event, _| events.borrow_mut().push(event.clone())
7083 })
7084 .detach();
7085 });
7086
7087 fs.remove_file("/dir/file2".as_ref(), Default::default())
7088 .await
7089 .unwrap();
7090 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7091 assert_eq!(
7092 *events.borrow(),
7093 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7094 );
7095
7096 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7097 let events = Rc::new(RefCell::new(Vec::new()));
7098 let buffer3 = project
7099 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7100 .await
7101 .unwrap();
7102 buffer3.update(cx, |_, cx| {
7103 cx.subscribe(&buffer3, {
7104 let events = events.clone();
7105 move |_, _, event, _| events.borrow_mut().push(event.clone())
7106 })
7107 .detach();
7108 });
7109
7110 buffer3.update(cx, |buffer, cx| {
7111 buffer.edit([(0..0, "x")], cx);
7112 });
7113 events.borrow_mut().clear();
7114 fs.remove_file("/dir/file3".as_ref(), Default::default())
7115 .await
7116 .unwrap();
7117 buffer3
7118 .condition(&cx, |_, _| !events.borrow().is_empty())
7119 .await;
7120 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7121 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7122 }
7123
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // An unmodified buffer must be reloaded (via a diff, preserving anchors)
    // when its file changes on disk; a modified buffer must instead keep its
    // edits and be flagged as conflicted.
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir"], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save("/dir/the-file".as_ref(), &new_contents.into())
        .await
        .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    buffer
        .condition(&cx, |buffer, _| buffer.text() == new_contents)
        .await;

    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits; the anchor on the deleted
        // line ("c") landed at the start of the following line.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    buffer
        .condition(&cx, |buffer, _| buffer.has_conflict())
        .await;
}
7199
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // LSP diagnostics whose `related_information` links them together must be
    // grouped: each primary diagnostic and its hints share a `group_id`, with
    // `is_primary` set only on the originating diagnostic.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostic groups: "error 1" (one hint) and "error 2" (two hints),
    // cross-linked through `related_information` in both directions.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position: groups 0 and 1 interleave, and each
    // entry carries its group id and primary flag.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    // Group 1: "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
7450
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the two-phase LSP rename flow: `prepare_rename` returns the
    // renameable range, then `perform_rename` applies the server's workspace
    // edit across multiple buffers and returns a transaction covering both.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Advertise rename support with prepare_provider so prepare_rename is used.
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        capabilities: lsp::ServerCapabilities {
            rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                prepare_provider: Some(true),
                work_done_progress_options: Default::default(),
            })),
            ..Default::default()
        },
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE") yields the symbol's
    // full range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform_rename; the server's workspace edit touches both
    // one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(
                                    lsp::Position::new(0, 6),
                                    lsp::Position::new(0, 9),
                                ),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction covers both affected buffers with the edits
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
7589
7590 #[gpui::test]
7591 async fn test_search(cx: &mut gpui::TestAppContext) {
7592 let fs = FakeFs::new(cx.background());
7593 fs.insert_tree(
7594 "/dir",
7595 json!({
7596 "one.rs": "const ONE: usize = 1;",
7597 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7598 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7599 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7600 }),
7601 )
7602 .await;
7603 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7604 assert_eq!(
7605 search(&project, SearchQuery::text("TWO", false, true), cx)
7606 .await
7607 .unwrap(),
7608 HashMap::from_iter([
7609 ("two.rs".to_string(), vec![6..9]),
7610 ("three.rs".to_string(), vec![37..40])
7611 ])
7612 );
7613
7614 let buffer_4 = project
7615 .update(cx, |project, cx| {
7616 project.open_local_buffer("/dir/four.rs", cx)
7617 })
7618 .await
7619 .unwrap();
7620 buffer_4.update(cx, |buffer, cx| {
7621 let text = "two::TWO";
7622 buffer.edit([(20..28, text), (31..43, text)], cx);
7623 });
7624
7625 assert_eq!(
7626 search(&project, SearchQuery::text("TWO", false, true), cx)
7627 .await
7628 .unwrap(),
7629 HashMap::from_iter([
7630 ("two.rs".to_string(), vec![6..9]),
7631 ("three.rs".to_string(), vec![37..40]),
7632 ("four.rs".to_string(), vec![25..28, 36..39])
7633 ])
7634 );
7635
7636 async fn search(
7637 project: &ModelHandle<Project>,
7638 query: SearchQuery,
7639 cx: &mut gpui::TestAppContext,
7640 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7641 let results = project
7642 .update(cx, |project, cx| project.search(query, cx))
7643 .await?;
7644
7645 Ok(results
7646 .into_iter()
7647 .map(|(buffer, ranges)| {
7648 buffer.read_with(cx, |buffer, _| {
7649 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7650 let ranges = ranges
7651 .into_iter()
7652 .map(|range| range.to_offset(buffer))
7653 .collect::<Vec<_>>();
7654 (path, ranges)
7655 })
7656 })
7657 .collect())
7658 }
7659 }
7660}