1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use util::{post_inc, ResultExt, TryFutureExt as _};
53
54pub use fs::*;
55pub use worktree::*;
56
/// An entity that can be associated with a project entry (such as an open
/// buffer), letting the project map the entity back to a file on disk.
pub trait Item: Entity {
    /// The project entry this item corresponds to, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
60
/// Top-level model representing a set of worktrees plus the collaborative
/// state layered on top of them: open buffers, language servers, and peers.
pub struct Project {
    // Handles to all worktrees in this project; weak handles allow trees that
    // are no longer referenced elsewhere to be dropped.
    worktrees: Vec<WorktreeHandle>,
    // The project entry corresponding to the currently active item, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight startup tasks for language servers that haven't finished
    // initializing yet.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Per-server status (name, pending work, pending diagnostic updates).
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    // Shared JSON settings blob for language servers.
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is hosted locally or was joined from a remote peer.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Watch used to signal whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids grouped by guest peer; cleared when the project is unshared.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Paths with a load in flight, so concurrent opens share a single task.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Local worktrees currently being created, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All buffers that have been opened, keyed by their remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Versioned text snapshots per buffer id — presumably retained so replies
    // referencing older document versions can be interpreted; confirm at the
    // sites that push into this map.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value identifying this project instance.
    nonce: u128,
}
92
/// How the project retains a tracked buffer.
enum OpenBuffer {
    /// The project itself keeps the buffer alive (used while shared).
    Strong(ModelHandle<Buffer>),
    /// The buffer lives only as long as some other owner retains it.
    Weak(WeakModelHandle<Buffer>),
    /// The buffer is still loading; operations received in the meantime are
    /// accumulated here.
    Loading(Vec<Operation>),
}
98
/// A worktree handle that may or may not keep its worktree alive.
enum WorktreeHandle {
    /// The project keeps the worktree alive.
    Strong(ModelHandle<Worktree>),
    /// The worktree may be dropped once no other owner retains it.
    Weak(WeakModelHandle<Worktree>),
}
103
/// Distinguishes a locally-hosted project from one joined as a guest.
enum ProjectClientState {
    Local {
        /// Whether the project is currently shared with guests.
        is_shared: bool,
        /// Holds the server-assigned project id once registration completes.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that registers/unregisters the project as the
        /// client connects and disconnects.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Background task watching connection status to detect unsharing.
        _detect_unshare_task: Task<Option<()>>,
    },
}
118
/// A peer participating in this project, with the replica id assigned to them.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
125
/// Events emitted by a [`Project`] for observers (via `cx.emit`).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    /// The server-assigned id changed (e.g. on registration/unregistration).
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
137
/// UI-facing status for one running language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work items keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}
144
/// One in-progress work item reported by a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Skipped during serialization because `Instant` has no stable
    // representation off-process.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
152
/// A path within this project: a worktree plus a path relative to its root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
158
/// Counts of primary error and warning diagnostics for a file.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
164
/// A range within a specific buffer, anchored so it survives edits.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
170
/// An LSP document highlight: an anchored range plus its highlight kind.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
176
/// A symbol reported by a language server, located within this project.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server reported this symbol — NOTE(review):
    // may differ from `worktree_id` when a server spans trees; confirm.
    pub source_worktree_id: WorktreeId,
    // Worktree containing the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest — presumably used to validate the symbol when reopening
    // it later; confirm against the signing site.
    pub signature: [u8; 32],
}
189
/// A set of edits spanning multiple buffers: maps each affected buffer to the
/// transaction applied to it.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 };
199
200 for entry in diagnostics {
201 if entry.diagnostic.is_primary {
202 match entry.diagnostic.severity {
203 DiagnosticSeverity::ERROR => this.error_count += 1,
204 DiagnosticSeverity::WARNING => this.warning_count += 1,
205 _ => {}
206 }
207 }
208 }
209
210 this
211 }
212
213 pub fn is_empty(&self) -> bool {
214 self.error_count == 0 && self.warning_count == 0
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 }
223 }
224}
225
/// Identifier for an entry (file or directory) within a worktree.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel id that sorts after every id allocated via [`Self::new`].
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id from the shared counter.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Wire representation of this id.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Raw numeric value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
248
249impl Project {
    /// Registers all RPC handlers that let a remote peer drive this project
    /// model. Called once during client setup.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, parameterized by the
        // command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
284
    /// Creates a local (host-side) project backed by the given file system,
    /// with no worktrees yet.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Keep the project's server registration in sync with the client's
            // connection status: register whenever we (re)connect, unregister
            // otherwise. The weak handle lets the project be dropped while
            // this task is still running.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random nonce identifying this project instance.
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
347
    /// Joins the project with the given server id as a guest: authenticates,
    /// downloads the worktree and collaborator lists, and constructs a remote
    /// `Project` replica.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Create a remote replica for each of the host's worktrees; their
        // contents load in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection: losing it (even briefly) means the
                    // project is no longer being shared with us.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the ones the host reported.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Load the collaborating users before building the collaborator map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
459
    /// Test helper: builds a local project over `fs` with a fake HTTP client,
    /// creates a worktree for each root path, and waits for the initial scans
    /// to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has finished its initial scan so tests
            // see a fully-populated snapshot.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
483
484 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
485 self.opened_buffers
486 .get(&remote_id)
487 .and_then(|buffer| buffer.upgrade(cx))
488 }
489
    /// The language registry used to detect and configure buffer languages.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
493
    /// Test helper: asserts structural invariants — on the host, worktree
    /// absolute paths are unique; on a guest, no buffer has deferred
    /// (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
527
528 #[cfg(any(test, feature = "test-support"))]
529 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
530 let path = path.into();
531 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
532 self.opened_buffers.iter().any(|(_, buffer)| {
533 if let Some(buffer) = buffer.upgrade(cx) {
534 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
535 if file.worktree == worktree && file.path() == &path.path {
536 return true;
537 }
538 }
539 }
540 false
541 })
542 } else {
543 false
544 }
545 }
546
    /// The file-system abstraction this project reads and writes through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
550
    /// Tears down this project's server registration: stops sharing,
    /// unregisters every live local worktree, clears the stored remote id,
    /// and drops all RPC subscriptions.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
567
    /// Registers this project with the server: obtains a fresh remote id, then
    /// registers every live worktree under it. Any previous registration is
    /// torn down first.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                // Route incoming messages addressed to the new remote id to
                // this model.
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // Wait for all worktree registrations, failing on the first error.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
600
601 pub fn remote_id(&self) -> Option<u64> {
602 match &self.client_state {
603 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
604 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
605 }
606 }
607
    /// Returns a future resolving to this project's remote id: immediately for
    /// a remote project, otherwise once registration assigns one.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // `watch` is always `Some` when `id` is `None` (Local arm above).
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
630
631 pub fn replica_id(&self) -> ReplicaId {
632 match &self.client_state {
633 ProjectClientState::Local { .. } => 0,
634 ProjectClientState::Remote { replica_id, .. } => *replica_id,
635 }
636 }
637
    /// The peers currently collaborating on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
641
    /// Iterates over all worktrees that are still alive, upgrading weak
    /// handles and skipping dropped ones.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
650
651 pub fn visible_worktrees<'a>(
652 &'a self,
653 cx: &'a AppContext,
654 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
655 self.worktrees.iter().filter_map(|worktree| {
656 worktree.upgrade(cx).and_then(|worktree| {
657 if worktree.read(cx).is_visible() {
658 Some(worktree)
659 } else {
660 None
661 }
662 })
663 })
664 }
665
    /// Finds the live worktree with the given id, if any.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }
674
    /// Finds the live worktree containing the given entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }
683
    /// The id of the worktree containing the given entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
692
    /// Creates an empty file at the given project path. On the host this
    /// writes through the local worktree; on a guest it sends a
    /// `CreateProjectEntry` request and inserts the returned entry into the
    /// remote worktree. Returns `None` when the worktree doesn't exist.
    pub fn create_file(
        &mut self,
        project_path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;

        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().write_file(
                    project_path.path,
                    Default::default(),
                    cx,
                )
            }))
        } else {
            let client = self.client.clone();
            // NOTE(review): assumes a remote project always has a remote id —
            // this unwrap panics otherwise.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory: false,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the newly-created entry into our replica of the
                // worktree so it appears immediately.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(entry, cx)
                    })
                    .await
            }))
        }
    }
733
    /// Renames/moves the given entry to `new_path` within its worktree. On the
    /// host this goes through the local worktree; on a guest it sends a
    /// `RenameProjectEntry` request and inserts the returned entry into the
    /// remote worktree. Returns `None` when no worktree contains the entry.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // NOTE(review): assumes a remote project always has a remote id —
            // this unwrap panics otherwise.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Reflect the rename in our replica of the worktree.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(entry, cx)
                    })
                    .await
            }))
        }
    }
772
773 pub fn can_share(&self, cx: &AppContext) -> bool {
774 self.is_local() && self.visible_worktrees(cx).next().is_some()
775 }
776
    /// Starts sharing this local project: upgrades buffer and worktree handles
    /// to strong so they stay alive for guests, tells the server, then shares
    /// each worktree's contents.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // While shared, the project must keep all open buffers
                    // alive on behalf of guests.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise, retain all worktrees strongly while shared.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share each worktree's contents after the server accepts.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
838
839 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
840 let rpc = self.client.clone();
841
842 if let ProjectClientState::Local {
843 is_shared,
844 remote_id_rx,
845 ..
846 } = &mut self.client_state
847 {
848 if !*is_shared {
849 return;
850 }
851
852 *is_shared = false;
853 self.collaborators.clear();
854 self.shared_buffers.clear();
855 for worktree_handle in self.worktrees.iter_mut() {
856 if let WorktreeHandle::Strong(worktree) = worktree_handle {
857 let is_visible = worktree.update(cx, |worktree, _| {
858 worktree.as_local_mut().unwrap().unshare();
859 worktree.is_visible()
860 });
861 if !is_visible {
862 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
863 }
864 }
865 }
866
867 for open_buffer in self.opened_buffers.values_mut() {
868 match open_buffer {
869 OpenBuffer::Strong(buffer) => {
870 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
871 }
872 _ => {}
873 }
874 }
875
876 if let Some(project_id) = *remote_id_rx.borrow() {
877 rpc.send(proto::UnshareProject { project_id }).log_err();
878 }
879
880 cx.notify();
881 } else {
882 log::error!("attempted to unshare a remote project");
883 }
884 }
885
886 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
887 if let ProjectClientState::Remote {
888 sharing_has_stopped,
889 ..
890 } = &mut self.client_state
891 {
892 *sharing_has_stopped = true;
893 self.collaborators.clear();
894 cx.notify();
895 }
896 }
897
898 pub fn is_read_only(&self) -> bool {
899 match &self.client_state {
900 ProjectClientState::Local { .. } => false,
901 ProjectClientState::Remote {
902 sharing_has_stopped,
903 ..
904 } => *sharing_has_stopped,
905 }
906 }
907
908 pub fn is_local(&self) -> bool {
909 match &self.client_state {
910 ProjectClientState::Local { .. } => true,
911 ProjectClientState::Remote { .. } => false,
912 }
913 }
914
915 pub fn is_remote(&self) -> bool {
916 !self.is_local()
917 }
918
919 pub fn create_buffer(
920 &mut self,
921 text: &str,
922 language: Option<Arc<Language>>,
923 cx: &mut ModelContext<Self>,
924 ) -> Result<ModelHandle<Buffer>> {
925 if self.is_remote() {
926 return Err(anyhow!("creating buffers as a guest is not supported yet"));
927 }
928
929 let buffer = cx.add_model(|cx| {
930 Buffer::new(self.replica_id(), text, cx)
931 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
932 });
933 self.register_buffer(&buffer, cx)?;
934 Ok(buffer)
935 }
936
    /// Opens the buffer at `path` and resolves its project entry id, returning
    /// the buffer as a type-erased handle.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            // A buffer opened by path should have a backing file with an
            // entry; fail if it doesn't.
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
953
954 pub fn open_local_buffer(
955 &mut self,
956 abs_path: impl AsRef<Path>,
957 cx: &mut ModelContext<Self>,
958 ) -> Task<Result<ModelHandle<Buffer>>> {
959 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
960 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
961 } else {
962 Task::ready(Err(anyhow!("no such path")))
963 }
964 }
965
    /// Opens (or returns an already-open) buffer for the given project path.
    /// Concurrent requests for the same path are deduplicated: they all wait
    /// on a single load task via a shared watch channel.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait for the load (whether started here or by an earlier caller) to
        // publish its result through the watch.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1026
    /// Loads a buffer from disk through the local worktree and registers it
    /// with this project.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
1043
    /// Requests a buffer's contents from the host over RPC and deserializes
    /// the response into a local replica.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // NOTE(review): assumes a remote project always has a remote id —
        // this unwrap panics otherwise.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
1068
    /// Opens a buffer for a file URI supplied by a language server. If the
    /// path lies outside all existing worktrees, a new invisible worktree is
    /// created for it and the server is associated with that worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create a non-visible one
                // rooted at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                // Record the server as serving the new worktree too.
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The new worktree is rooted at the file, so the relative
                // path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1107
1108 pub fn open_buffer_by_id(
1109 &mut self,
1110 id: u64,
1111 cx: &mut ModelContext<Self>,
1112 ) -> Task<Result<ModelHandle<Buffer>>> {
1113 if let Some(buffer) = self.buffer_for_id(id, cx) {
1114 Task::ready(Ok(buffer))
1115 } else if self.is_local() {
1116 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1117 } else if let Some(project_id) = self.remote_id() {
1118 let request = self
1119 .client
1120 .request(proto::OpenBufferById { project_id, id });
1121 cx.spawn(|this, mut cx| async move {
1122 let buffer = request
1123 .await?
1124 .buffer
1125 .ok_or_else(|| anyhow!("invalid buffer"))?;
1126 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1127 .await
1128 })
1129 } else {
1130 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1131 }
1132 }
1133
    /// Saves `buffer` to a new absolute path, creating a (visible) worktree
    /// for that path if necessary and re-associating the buffer's language
    /// and language server afterwards.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        // Capture the buffer's current on-disk path (if it is a local file).
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            // Close the document at its old path with the language server
            // *before* the buffer takes on its new identity.
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The new path may imply a different language; re-detect it and
            // open the document with the appropriate language server.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1165
1166 pub fn get_open_buffer(
1167 &mut self,
1168 path: &ProjectPath,
1169 cx: &mut ModelContext<Self>,
1170 ) -> Option<ModelHandle<Buffer>> {
1171 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1172 self.opened_buffers.values().find_map(|buffer| {
1173 let buffer = buffer.upgrade(cx)?;
1174 let file = File::from_dyn(buffer.read(cx).file())?;
1175 if file.worktree == worktree && file.path() == &path.path {
1176 Some(buffer)
1177 } else {
1178 None
1179 }
1180 })
1181 }
1182
    /// Records a newly-opened buffer in `opened_buffers`, assigns its
    /// language, announces it to the relevant language server, and arranges
    /// for a `didClose` notification when the buffer is eventually released.
    ///
    /// Errors if a live buffer with the same remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Collaborative projects (remote, or shared with guests) must keep
        // buffers alive; otherwise the project only holds them weakly.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived while the buffer was still loading are
            // applied now that it exists.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // Replacing a dead weak handle is fine; a live one means the
                // same id was registered twice.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is dropped, tell its language server that the
        // document is closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1240
1241 fn register_buffer_with_language_server(
1242 &mut self,
1243 buffer_handle: &ModelHandle<Buffer>,
1244 cx: &mut ModelContext<Self>,
1245 ) {
1246 let buffer = buffer_handle.read(cx);
1247 let buffer_id = buffer.remote_id();
1248 if let Some(file) = File::from_dyn(buffer.file()) {
1249 if file.is_local() {
1250 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1251 let initial_snapshot = buffer.text_snapshot();
1252
1253 let mut language_server = None;
1254 let mut language_id = None;
1255 if let Some(language) = buffer.language() {
1256 let worktree_id = file.worktree_id(cx);
1257 if let Some(adapter) = language.lsp_adapter() {
1258 language_id = adapter.id_for_language(language.name().as_ref());
1259 language_server = self
1260 .language_servers
1261 .get(&(worktree_id, adapter.name()))
1262 .cloned();
1263 }
1264 }
1265
1266 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1267 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1268 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1269 .log_err();
1270 }
1271 }
1272
1273 if let Some((_, server)) = language_server {
1274 server
1275 .notify::<lsp::notification::DidOpenTextDocument>(
1276 lsp::DidOpenTextDocumentParams {
1277 text_document: lsp::TextDocumentItem::new(
1278 uri,
1279 language_id.unwrap_or_default(),
1280 0,
1281 initial_snapshot.text(),
1282 ),
1283 }
1284 .clone(),
1285 )
1286 .log_err();
1287 buffer_handle.update(cx, |buffer, cx| {
1288 buffer.set_completion_triggers(
1289 server
1290 .capabilities()
1291 .completion_provider
1292 .as_ref()
1293 .and_then(|provider| provider.trigger_characters.clone())
1294 .unwrap_or(Vec::new()),
1295 cx,
1296 )
1297 });
1298 self.buffer_snapshots
1299 .insert(buffer_id, vec![(0, initial_snapshot)]);
1300 }
1301 }
1302 }
1303 }
1304
1305 fn unregister_buffer_from_language_server(
1306 &mut self,
1307 buffer: &ModelHandle<Buffer>,
1308 old_path: PathBuf,
1309 cx: &mut ModelContext<Self>,
1310 ) {
1311 buffer.update(cx, |buffer, cx| {
1312 buffer.update_diagnostics(Default::default(), cx);
1313 self.buffer_snapshots.remove(&buffer.remote_id());
1314 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1315 language_server
1316 .notify::<lsp::notification::DidCloseTextDocument>(
1317 lsp::DidCloseTextDocumentParams {
1318 text_document: lsp::TextDocumentIdentifier::new(
1319 lsp::Url::from_file_path(old_path).unwrap(),
1320 ),
1321 },
1322 )
1323 .log_err();
1324 }
1325 });
1326 }
1327
    /// Reacts to buffer events: forwards CRDT operations to collaborators,
    /// translates edits into incremental `textDocument/didChange`
    /// notifications, and broadcasts saves to the worktree's servers.
    ///
    /// The `Option` return exists purely so `?` can be used for early exit;
    /// callers ignore the value.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            // Forward the operation to the host/guests when collaborating
            // (no-op when the project has no remote id).
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Build one LSP content change per edit since the previous
                // snapshot. Ranges are expressed in coordinates of the text
                // *before* the edit, with the replacement text taken from the
                // new snapshot.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember this version's snapshot so later edits (and
                // version-tagged server responses) can be interpreted.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            // Notify every language server for this worktree that the file
            // was saved to disk.
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1414
1415 fn language_servers_for_worktree(
1416 &self,
1417 worktree_id: WorktreeId,
1418 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1419 self.language_servers.iter().filter_map(
1420 move |((language_server_worktree_id, _), server)| {
1421 if *language_server_worktree_id == worktree_id {
1422 Some(server)
1423 } else {
1424 None
1425 }
1426 },
1427 )
1428 }
1429
1430 fn assign_language_to_buffer(
1431 &mut self,
1432 buffer: &ModelHandle<Buffer>,
1433 cx: &mut ModelContext<Self>,
1434 ) -> Option<()> {
1435 // If the buffer has a language, set it and start the language server if we haven't already.
1436 let full_path = buffer.read(cx).file()?.full_path(cx);
1437 let language = self.languages.select_language(&full_path)?;
1438 buffer.update(cx, |buffer, cx| {
1439 buffer.set_language(Some(language.clone()), cx);
1440 });
1441
1442 let file = File::from_dyn(buffer.read(cx).file())?;
1443 let worktree = file.worktree.read(cx).as_local()?;
1444 let worktree_id = worktree.id();
1445 let worktree_abs_path = worktree.abs_path().clone();
1446 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1447
1448 None
1449 }
1450
    /// Starts the language server for `language` in the given worktree — at
    /// most once per (worktree, adapter name) pair — wires up its incoming
    /// notifications and requests, pushes the current configuration, and
    /// opens every already-open matching buffer with it.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            // Languages without an LSP adapter have no server to start.
            return;
        };
        let key = (worktree_id, adapter.name());
        // The entry API makes this idempotent: a second call for the same
        // (worktree, adapter) pair does nothing.
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    // Wait for the server binary to start, then run the LSP
                    // initialize handshake. Failures are logged and abort the
                    // setup (the task returns None).
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // project's stored language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            // Return the requested section, or
                                            // the whole settings object when no
                                            // section is named.
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits to our buffers.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Track work-done progress reported by the server.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Record the running server and an empty status entry.
                        this.language_servers
                            .insert(key.clone(), (adapter.clone(), language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings to the freshly started server.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Announce the new server to collaborators, if shared.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                // Skip buffers from other worktrees or other
                                // languages.
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                // NOTE(review): `?` here exits the whole
                                // closure on a non-local file, skipping the
                                // remaining buffers — confirm this is intended.
                                let file = file.as_local()?;
                                // Seed (or reuse) the snapshot history that
                                // versions didChange notifications.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                let language_id = adapter.id_for_language(language.name().as_ref());
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                language_id.unwrap_or_default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1663
    /// Restarts the language servers backing the given buffers. The `Option`
    /// return exists only so `?` can be used for early exit.
    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // Collect distinct (worktree, path) combinations so each affected
        // server is restarted only once even if many buffers share it.
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                // Only local files have restartable servers.
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            // NOTE(review): `?` aborts the remaining restarts if one path has
            // no recognizable language — confirm that is intended.
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }
1687
1688 fn restart_language_server(
1689 &mut self,
1690 worktree_id: WorktreeId,
1691 worktree_path: Arc<Path>,
1692 language: Arc<Language>,
1693 cx: &mut ModelContext<Self>,
1694 ) {
1695 let adapter = if let Some(adapter) = language.lsp_adapter() {
1696 adapter
1697 } else {
1698 return;
1699 };
1700 let key = (worktree_id, adapter.name());
1701 let server_to_shutdown = self.language_servers.remove(&key);
1702 self.started_language_servers.remove(&key);
1703 server_to_shutdown
1704 .as_ref()
1705 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1706 cx.spawn_weak(|this, mut cx| async move {
1707 if let Some(this) = this.upgrade(&cx) {
1708 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1709 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1710 shutdown_task.await;
1711 }
1712 }
1713
1714 this.update(&mut cx, |this, cx| {
1715 this.start_language_server(worktree_id, worktree_path, language, cx);
1716 });
1717 }
1718 })
1719 .detach();
1720 }
1721
1722 fn on_lsp_diagnostics_published(
1723 &mut self,
1724 server_id: usize,
1725 mut params: lsp::PublishDiagnosticsParams,
1726 adapter: &Arc<dyn LspAdapter>,
1727 disk_based_diagnostics_progress_token: Option<&str>,
1728 cx: &mut ModelContext<Self>,
1729 ) {
1730 adapter.process_diagnostics(&mut params);
1731 if disk_based_diagnostics_progress_token.is_none() {
1732 self.disk_based_diagnostics_started(cx);
1733 self.broadcast_language_server_update(
1734 server_id,
1735 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1736 proto::LspDiskBasedDiagnosticsUpdating {},
1737 ),
1738 );
1739 }
1740 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1741 .log_err();
1742 if disk_based_diagnostics_progress_token.is_none() {
1743 self.disk_based_diagnostics_finished(cx);
1744 self.broadcast_language_server_update(
1745 server_id,
1746 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1747 proto::LspDiskBasedDiagnosticsUpdated {},
1748 ),
1749 );
1750 }
1751 }
1752
    /// Handles a `$/progress` notification from a language server.
    ///
    /// Progress on the adapter's disk-based-diagnostics token is turned into
    /// diagnostics started/finished events (reference-counted via
    /// `pending_diagnostic_updates` so nested begin/end pairs collapse into
    /// one). All other tokens are tracked as pending work on the server's
    /// status and mirrored to collaborators.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are tracked; numeric ones are skipped.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        // Ignore progress from servers we no longer track.
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };
        match progress {
            lsp::WorkDoneProgress::Begin(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only the first outstanding update fires the event.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Intermediate reports on the diagnostics token carry no
                // extra information, so only other tokens are recorded.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    // Only the last outstanding update fires the event.
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
1847
1848 fn on_lsp_work_start(
1849 &mut self,
1850 language_server_id: usize,
1851 token: String,
1852 cx: &mut ModelContext<Self>,
1853 ) {
1854 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1855 status.pending_work.insert(
1856 token,
1857 LanguageServerProgress {
1858 message: None,
1859 percentage: None,
1860 last_update_at: Instant::now(),
1861 },
1862 );
1863 cx.notify();
1864 }
1865 }
1866
1867 fn on_lsp_work_progress(
1868 &mut self,
1869 language_server_id: usize,
1870 token: String,
1871 progress: LanguageServerProgress,
1872 cx: &mut ModelContext<Self>,
1873 ) {
1874 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1875 status.pending_work.insert(token, progress);
1876 cx.notify();
1877 }
1878 }
1879
1880 fn on_lsp_work_end(
1881 &mut self,
1882 language_server_id: usize,
1883 token: String,
1884 cx: &mut ModelContext<Self>,
1885 ) {
1886 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1887 status.pending_work.remove(&token);
1888 cx.notify();
1889 }
1890 }
1891
1892 async fn on_lsp_workspace_edit(
1893 this: WeakModelHandle<Self>,
1894 params: lsp::ApplyWorkspaceEditParams,
1895 server_id: usize,
1896 adapter: Arc<dyn LspAdapter>,
1897 language_server: Arc<LanguageServer>,
1898 mut cx: AsyncAppContext,
1899 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1900 let this = this
1901 .upgrade(&cx)
1902 .ok_or_else(|| anyhow!("project project closed"))?;
1903 let transaction = Self::deserialize_workspace_edit(
1904 this.clone(),
1905 params.edit,
1906 true,
1907 adapter.clone(),
1908 language_server.clone(),
1909 &mut cx,
1910 )
1911 .await
1912 .log_err();
1913 this.update(&mut cx, |this, _| {
1914 if let Some(transaction) = transaction {
1915 this.last_workspace_edits_by_language_server
1916 .insert(server_id, transaction);
1917 }
1918 });
1919 Ok(lsp::ApplyWorkspaceEditResponse {
1920 applied: true,
1921 failed_change: None,
1922 failure_reason: None,
1923 })
1924 }
1925
1926 fn broadcast_language_server_update(
1927 &self,
1928 language_server_id: usize,
1929 event: proto::update_language_server::Variant,
1930 ) {
1931 if let Some(project_id) = self.remote_id() {
1932 self.client
1933 .send(proto::UpdateLanguageServer {
1934 project_id,
1935 language_server_id: language_server_id as u64,
1936 variant: Some(event),
1937 })
1938 .log_err();
1939 }
1940 }
1941
1942 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1943 for (_, server) in self.language_servers.values() {
1944 server
1945 .notify::<lsp::notification::DidChangeConfiguration>(
1946 lsp::DidChangeConfigurationParams {
1947 settings: settings.clone(),
1948 },
1949 )
1950 .ok();
1951 }
1952 *self.language_server_settings.lock() = settings;
1953 }
1954
    /// Iterates over the status (name, pending work, pending diagnostic
    /// updates) of every language server tracked by this project.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1960
1961 pub fn update_diagnostics(
1962 &mut self,
1963 params: lsp::PublishDiagnosticsParams,
1964 disk_based_sources: &[&str],
1965 cx: &mut ModelContext<Self>,
1966 ) -> Result<()> {
1967 let abs_path = params
1968 .uri
1969 .to_file_path()
1970 .map_err(|_| anyhow!("URI is not a file"))?;
1971 let mut next_group_id = 0;
1972 let mut diagnostics = Vec::default();
1973 let mut primary_diagnostic_group_ids = HashMap::default();
1974 let mut sources_by_group_id = HashMap::default();
1975 let mut supporting_diagnostics = HashMap::default();
1976 for diagnostic in ¶ms.diagnostics {
1977 let source = diagnostic.source.as_ref();
1978 let code = diagnostic.code.as_ref().map(|code| match code {
1979 lsp::NumberOrString::Number(code) => code.to_string(),
1980 lsp::NumberOrString::String(code) => code.clone(),
1981 });
1982 let range = range_from_lsp(diagnostic.range);
1983 let is_supporting = diagnostic
1984 .related_information
1985 .as_ref()
1986 .map_or(false, |infos| {
1987 infos.iter().any(|info| {
1988 primary_diagnostic_group_ids.contains_key(&(
1989 source,
1990 code.clone(),
1991 range_from_lsp(info.location.range),
1992 ))
1993 })
1994 });
1995
1996 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1997 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1998 });
1999
2000 if is_supporting {
2001 supporting_diagnostics.insert(
2002 (source, code.clone(), range),
2003 (diagnostic.severity, is_unnecessary),
2004 );
2005 } else {
2006 let group_id = post_inc(&mut next_group_id);
2007 let is_disk_based = source.map_or(false, |source| {
2008 disk_based_sources.contains(&source.as_str())
2009 });
2010
2011 sources_by_group_id.insert(group_id, source);
2012 primary_diagnostic_group_ids
2013 .insert((source, code.clone(), range.clone()), group_id);
2014
2015 diagnostics.push(DiagnosticEntry {
2016 range,
2017 diagnostic: Diagnostic {
2018 code: code.clone(),
2019 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2020 message: diagnostic.message.clone(),
2021 group_id,
2022 is_primary: true,
2023 is_valid: true,
2024 is_disk_based,
2025 is_unnecessary,
2026 },
2027 });
2028 if let Some(infos) = &diagnostic.related_information {
2029 for info in infos {
2030 if info.location.uri == params.uri && !info.message.is_empty() {
2031 let range = range_from_lsp(info.location.range);
2032 diagnostics.push(DiagnosticEntry {
2033 range,
2034 diagnostic: Diagnostic {
2035 code: code.clone(),
2036 severity: DiagnosticSeverity::INFORMATION,
2037 message: info.message.clone(),
2038 group_id,
2039 is_primary: false,
2040 is_valid: true,
2041 is_disk_based,
2042 is_unnecessary: false,
2043 },
2044 });
2045 }
2046 }
2047 }
2048 }
2049 }
2050
2051 for entry in &mut diagnostics {
2052 let diagnostic = &mut entry.diagnostic;
2053 if !diagnostic.is_primary {
2054 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2055 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2056 source,
2057 diagnostic.code.clone(),
2058 entry.range.clone(),
2059 )) {
2060 if let Some(severity) = severity {
2061 diagnostic.severity = severity;
2062 }
2063 diagnostic.is_unnecessary = is_unnecessary;
2064 }
2065 }
2066 }
2067
2068 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2069 Ok(())
2070 }
2071
2072 pub fn update_diagnostic_entries(
2073 &mut self,
2074 abs_path: PathBuf,
2075 version: Option<i32>,
2076 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2077 cx: &mut ModelContext<Project>,
2078 ) -> Result<(), anyhow::Error> {
2079 let (worktree, relative_path) = self
2080 .find_local_worktree(&abs_path, cx)
2081 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2082 if !worktree.read(cx).is_visible() {
2083 return Ok(());
2084 }
2085
2086 let project_path = ProjectPath {
2087 worktree_id: worktree.read(cx).id(),
2088 path: relative_path.into(),
2089 };
2090 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2091 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2092 }
2093
2094 let updated = worktree.update(cx, |worktree, cx| {
2095 worktree
2096 .as_local_mut()
2097 .ok_or_else(|| anyhow!("not a local worktree"))?
2098 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2099 })?;
2100 if updated {
2101 cx.emit(Event::DiagnosticsUpdated(project_path));
2102 }
2103 Ok(())
2104 }
2105
    /// Applies diagnostics to an open buffer, translating them from the LSP
    /// snapshot identified by `version` into the buffer's current contents:
    /// sorts entries deterministically, remaps disk-based diagnostics through
    /// unsaved edits, clips ranges to valid positions, and widens empty
    /// ranges so they remain visible.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break order for diagnostics sharing a range: primary first,
        // then disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // The snapshot whose coordinates the incoming diagnostics refer to.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            // Clamp to positions that exist in the snapshot.
            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                // At end of line, expand backwards instead.
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2174
2175 pub fn reload_buffers(
2176 &self,
2177 buffers: HashSet<ModelHandle<Buffer>>,
2178 push_to_history: bool,
2179 cx: &mut ModelContext<Self>,
2180 ) -> Task<Result<ProjectTransaction>> {
2181 let mut local_buffers = Vec::new();
2182 let mut remote_buffers = None;
2183 for buffer_handle in buffers {
2184 let buffer = buffer_handle.read(cx);
2185 if buffer.is_dirty() {
2186 if let Some(file) = File::from_dyn(buffer.file()) {
2187 if file.is_local() {
2188 local_buffers.push(buffer_handle);
2189 } else {
2190 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2191 }
2192 }
2193 }
2194 }
2195
2196 let remote_buffers = self.remote_id().zip(remote_buffers);
2197 let client = self.client.clone();
2198
2199 cx.spawn(|this, mut cx| async move {
2200 let mut project_transaction = ProjectTransaction::default();
2201
2202 if let Some((project_id, remote_buffers)) = remote_buffers {
2203 let response = client
2204 .request(proto::ReloadBuffers {
2205 project_id,
2206 buffer_ids: remote_buffers
2207 .iter()
2208 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2209 .collect(),
2210 })
2211 .await?
2212 .transaction
2213 .ok_or_else(|| anyhow!("missing transaction"))?;
2214 project_transaction = this
2215 .update(&mut cx, |this, cx| {
2216 this.deserialize_project_transaction(response, push_to_history, cx)
2217 })
2218 .await?;
2219 }
2220
2221 for buffer in local_buffers {
2222 let transaction = buffer
2223 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2224 .await?;
2225 buffer.update(&mut cx, |buffer, cx| {
2226 if let Some(transaction) = transaction {
2227 if !push_to_history {
2228 buffer.forget_transaction(transaction.id);
2229 }
2230 project_transaction.0.insert(cx.handle(), transaction);
2231 }
2232 });
2233 }
2234
2235 Ok(project_transaction)
2236 })
2237 }
2238
2239 pub fn format(
2240 &self,
2241 buffers: HashSet<ModelHandle<Buffer>>,
2242 push_to_history: bool,
2243 cx: &mut ModelContext<Project>,
2244 ) -> Task<Result<ProjectTransaction>> {
2245 let mut local_buffers = Vec::new();
2246 let mut remote_buffers = None;
2247 for buffer_handle in buffers {
2248 let buffer = buffer_handle.read(cx);
2249 if let Some(file) = File::from_dyn(buffer.file()) {
2250 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2251 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2252 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2253 }
2254 } else {
2255 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2256 }
2257 } else {
2258 return Task::ready(Ok(Default::default()));
2259 }
2260 }
2261
2262 let remote_buffers = self.remote_id().zip(remote_buffers);
2263 let client = self.client.clone();
2264
2265 cx.spawn(|this, mut cx| async move {
2266 let mut project_transaction = ProjectTransaction::default();
2267
2268 if let Some((project_id, remote_buffers)) = remote_buffers {
2269 let response = client
2270 .request(proto::FormatBuffers {
2271 project_id,
2272 buffer_ids: remote_buffers
2273 .iter()
2274 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2275 .collect(),
2276 })
2277 .await?
2278 .transaction
2279 .ok_or_else(|| anyhow!("missing transaction"))?;
2280 project_transaction = this
2281 .update(&mut cx, |this, cx| {
2282 this.deserialize_project_transaction(response, push_to_history, cx)
2283 })
2284 .await?;
2285 }
2286
2287 for (buffer, buffer_abs_path, language_server) in local_buffers {
2288 let text_document = lsp::TextDocumentIdentifier::new(
2289 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2290 );
2291 let capabilities = &language_server.capabilities();
2292 let tab_size = cx.update(|cx| {
2293 let language_name = buffer.read(cx).language().map(|language| language.name());
2294 cx.global::<Settings>().tab_size(language_name.as_deref())
2295 });
2296 let lsp_edits = if capabilities
2297 .document_formatting_provider
2298 .as_ref()
2299 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2300 {
2301 language_server
2302 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2303 text_document,
2304 options: lsp::FormattingOptions {
2305 tab_size,
2306 insert_spaces: true,
2307 insert_final_newline: Some(true),
2308 ..Default::default()
2309 },
2310 work_done_progress_params: Default::default(),
2311 })
2312 .await?
2313 } else if capabilities
2314 .document_range_formatting_provider
2315 .as_ref()
2316 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2317 {
2318 let buffer_start = lsp::Position::new(0, 0);
2319 let buffer_end =
2320 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2321 language_server
2322 .request::<lsp::request::RangeFormatting>(
2323 lsp::DocumentRangeFormattingParams {
2324 text_document,
2325 range: lsp::Range::new(buffer_start, buffer_end),
2326 options: lsp::FormattingOptions {
2327 tab_size: 4,
2328 insert_spaces: true,
2329 insert_final_newline: Some(true),
2330 ..Default::default()
2331 },
2332 work_done_progress_params: Default::default(),
2333 },
2334 )
2335 .await?
2336 } else {
2337 continue;
2338 };
2339
2340 if let Some(lsp_edits) = lsp_edits {
2341 let edits = this
2342 .update(&mut cx, |this, cx| {
2343 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2344 })
2345 .await?;
2346 buffer.update(&mut cx, |buffer, cx| {
2347 buffer.finalize_last_transaction();
2348 buffer.start_transaction();
2349 for (range, text) in edits {
2350 buffer.edit([(range, text)], cx);
2351 }
2352 if buffer.end_transaction(cx).is_some() {
2353 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2354 if !push_to_history {
2355 buffer.forget_transaction(transaction.id);
2356 }
2357 project_transaction.0.insert(cx.handle(), transaction);
2358 }
2359 });
2360 }
2361 }
2362
2363 Ok(project_transaction)
2364 })
2365 }
2366
2367 pub fn definition<T: ToPointUtf16>(
2368 &self,
2369 buffer: &ModelHandle<Buffer>,
2370 position: T,
2371 cx: &mut ModelContext<Self>,
2372 ) -> Task<Result<Vec<Location>>> {
2373 let position = position.to_point_utf16(buffer.read(cx));
2374 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2375 }
2376
2377 pub fn references<T: ToPointUtf16>(
2378 &self,
2379 buffer: &ModelHandle<Buffer>,
2380 position: T,
2381 cx: &mut ModelContext<Self>,
2382 ) -> Task<Result<Vec<Location>>> {
2383 let position = position.to_point_utf16(buffer.read(cx));
2384 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2385 }
2386
2387 pub fn document_highlights<T: ToPointUtf16>(
2388 &self,
2389 buffer: &ModelHandle<Buffer>,
2390 position: T,
2391 cx: &mut ModelContext<Self>,
2392 ) -> Task<Result<Vec<DocumentHighlight>>> {
2393 let position = position.to_point_utf16(buffer.read(cx));
2394
2395 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2396 }
2397
    /// Queries workspace symbols matching `query` from every running
    /// language server (when local) or from the host (when remote).
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Fan a workspace/symbol request out to every language server
            // whose worktree is local. Failures are logged and treated as
            // empty responses rather than failing the whole query.
            let mut requests = Vec::new();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    let lsp_adapter = lsp_adapter.clone();
                    let worktree_abs_path = worktree.abs_path().clone();
                    requests.push(
                        language_server
                            .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            })
                            .log_err()
                            .map(move |response| {
                                (
                                    lsp_adapter,
                                    worktree_id,
                                    worktree_abs_path,
                                    response.unwrap_or_default(),
                                )
                            }),
                    );
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                // The project may have been dropped while the requests were
                // in flight.
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Re-home the symbol in whichever local worktree
                            // contains its file; otherwise keep the server's
                            // worktree and compute a path relative to it.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            // Build a language-aware label, falling back to
                            // the raw symbol name.
                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host gathers and serializes the symbols.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            // Neither local nor connected to a host: nothing to query.
            Task::ready(Ok(Default::default()))
        }
    }
2500
    /// Opens the buffer containing the given workspace `symbol`.
    ///
    /// Locally, the symbol's path is resolved against its worktree and the
    /// buffer is opened through the language server that produced the
    /// symbol; remotely, the host is asked to open it.
    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            // Look up the language server instance that reported this symbol.
            let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
                symbol.source_worktree_id,
                symbol.language_server_name.clone(),
            )) {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            // Resolve the symbol's relative path against its worktree root.
            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2550
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Local buffers are queried through their language server; remote
    /// buffers ask the host and wait for this replica to catch up to the
    /// host's buffer version before deserializing the results.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // A buffer without a backing file has no completion source.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The server may answer with a flat array or a paged list;
                // normalize both into a plain Vec.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily-computed word range at the cursor, shared by all
                    // completions that lack an explicit edit range.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            snapshot
                                                .range_for_word_token_at(offset)
                                                .unwrap_or_else(|| offset..offset)
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                // Prefer a language-aware label; fall back to
                                // the raw completion label.
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this buffer has caught up to the version the
                // host computed the completions against.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2708
    /// Applies the additional text edits (e.g. auto-imports) associated with
    /// a completion after it has been confirmed.
    ///
    /// Locally, the completion is resolved via the language server; remotely,
    /// the host applies the edits and sends back the transaction. When
    /// `push_to_history` is false, the transaction is not kept in the
    /// buffer's undo history. Returns the transaction, if any edits applied.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Ask the server to fill in any lazily-resolved fields (such
                // as additional_text_edits) on the completion item.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group the edits into a single undoable transaction.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated to this
                    // replica before exposing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2790
2791 pub fn code_actions<T: Clone + ToOffset>(
2792 &self,
2793 buffer_handle: &ModelHandle<Buffer>,
2794 range: Range<T>,
2795 cx: &mut ModelContext<Self>,
2796 ) -> Task<Result<Vec<CodeAction>>> {
2797 let buffer_handle = buffer_handle.clone();
2798 let buffer = buffer_handle.read(cx);
2799 let snapshot = buffer.snapshot();
2800 let relevant_diagnostics = snapshot
2801 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2802 .map(|entry| entry.to_lsp_diagnostic_stub())
2803 .collect();
2804 let buffer_id = buffer.remote_id();
2805 let worktree;
2806 let buffer_abs_path;
2807 if let Some(file) = File::from_dyn(buffer.file()) {
2808 worktree = file.worktree.clone();
2809 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2810 } else {
2811 return Task::ready(Ok(Default::default()));
2812 };
2813 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2814
2815 if worktree.read(cx).as_local().is_some() {
2816 let buffer_abs_path = buffer_abs_path.unwrap();
2817 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2818 {
2819 server.clone()
2820 } else {
2821 return Task::ready(Ok(Default::default()));
2822 };
2823
2824 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2825 cx.foreground().spawn(async move {
2826 if !lang_server.capabilities().code_action_provider.is_some() {
2827 return Ok(Default::default());
2828 }
2829
2830 Ok(lang_server
2831 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2832 text_document: lsp::TextDocumentIdentifier::new(
2833 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2834 ),
2835 range: lsp_range,
2836 work_done_progress_params: Default::default(),
2837 partial_result_params: Default::default(),
2838 context: lsp::CodeActionContext {
2839 diagnostics: relevant_diagnostics,
2840 only: Some(vec![
2841 lsp::CodeActionKind::QUICKFIX,
2842 lsp::CodeActionKind::REFACTOR,
2843 lsp::CodeActionKind::REFACTOR_EXTRACT,
2844 lsp::CodeActionKind::SOURCE,
2845 ]),
2846 },
2847 })
2848 .await?
2849 .unwrap_or_default()
2850 .into_iter()
2851 .filter_map(|entry| {
2852 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2853 Some(CodeAction {
2854 range: range.clone(),
2855 lsp_action,
2856 })
2857 } else {
2858 None
2859 }
2860 })
2861 .collect())
2862 })
2863 } else if let Some(project_id) = self.remote_id() {
2864 let rpc = self.client.clone();
2865 let version = buffer.version();
2866 cx.spawn_weak(|_, mut cx| async move {
2867 let response = rpc
2868 .request(proto::GetCodeActions {
2869 project_id,
2870 buffer_id,
2871 start: Some(language::proto::serialize_anchor(&range.start)),
2872 end: Some(language::proto::serialize_anchor(&range.end)),
2873 version: serialize_version(&version),
2874 })
2875 .await?;
2876
2877 buffer_handle
2878 .update(&mut cx, |buffer, _| {
2879 buffer.wait_for_version(deserialize_version(response.version))
2880 })
2881 .await;
2882
2883 response
2884 .actions
2885 .into_iter()
2886 .map(language::proto::deserialize_code_action)
2887 .collect()
2888 })
2889 } else {
2890 Task::ready(Ok(Default::default()))
2891 }
2892 }
2893
    /// Applies a previously-fetched code action, returning the resulting
    /// project-wide transaction.
    ///
    /// Locally, the action is first resolved (or re-fetched when it doesn't
    /// support resolution) and then its workspace edit and/or command is
    /// executed. Remotely, the host applies the action and sends back the
    /// serialized transaction.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh its range
                    // in the action data and ask the server to fill in the
                    // actual edit.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolution support: re-request the actions and find
                    // the matching one by title, in case ours is stale.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any stale record, run the command, and then
                    // collect whatever workspace edits the server pushed to
                    // us while the command executed.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    // Neither an edit nor a command: nothing to apply.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2989
    /// Applies an LSP `WorkspaceEdit` to the project: creates, renames, and
    /// deletes files, and applies text edits, collecting one transaction per
    /// edited buffer into the returned `ProjectTransaction`.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        // Normalize the edit's two possible shapes (`document_changes` vs.
        // the legacy `changes` map) into a single list of operations.
        let mut operations = Vec::new();
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, so this trailing-slash check may never
                    // match a path produced by `to_file_path` — confirm the
                    // directory branch is actually reachable.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with("/")`
                    // caveat as in the Create branch above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or retrieve) the target buffer through the same
                    // language server so the edits apply to live contents.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                lsp_adapter.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            // Annotated edits carry their text edit in the
                            // right-hand variant; plain edits in the left.
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    // Apply all edits for this document as one transaction.
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
3120
3121 pub fn prepare_rename<T: ToPointUtf16>(
3122 &self,
3123 buffer: ModelHandle<Buffer>,
3124 position: T,
3125 cx: &mut ModelContext<Self>,
3126 ) -> Task<Result<Option<Range<Anchor>>>> {
3127 let position = position.to_point_utf16(buffer.read(cx));
3128 self.request_lsp(buffer, PrepareRename { position }, cx)
3129 }
3130
3131 pub fn perform_rename<T: ToPointUtf16>(
3132 &self,
3133 buffer: ModelHandle<Buffer>,
3134 position: T,
3135 new_name: String,
3136 push_to_history: bool,
3137 cx: &mut ModelContext<Self>,
3138 ) -> Task<Result<ProjectTransaction>> {
3139 let position = position.to_point_utf16(buffer.read(cx));
3140 self.request_lsp(
3141 buffer,
3142 PerformRename {
3143 position,
3144 new_name,
3145 push_to_history,
3146 },
3147 cx,
3148 )
3149 }
3150
    /// Searches the project's files for `query`, returning the matching
    /// anchor ranges grouped by buffer.
    ///
    /// Local projects run a three-stage parallel pipeline: scan files on
    /// disk for candidate paths, open candidates (plus already-open buffers)
    /// as buffers, then scan buffer contents for the exact match ranges.
    /// Remote projects delegate the search to the host over RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            // Snapshot every visible local worktree so the background scan
            // can run without further access to app state.
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: partition the visible files evenly across `workers`
            // background tasks; each task reads files from disk and sends
            // the paths whose raw contents may match the query.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        // Ceiling division so every path is assigned to some worker.
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        // Reused buffer for each file's absolute path.
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // This worker's index range overlaps this
                                                // snapshot; clamp to the overlapping part.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped — no one wants more
                                                    // results, stop scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            // Stage 2: feed the already-open buffers into the pipeline first,
            // then open a buffer for each matching path found on disk,
            // skipping any buffer that was already enqueued.
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // `insert` returns false when the buffer was
                            // already sent as an open buffer above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: scan buffer snapshots in parallel for the precise
            // match ranges, collecting per-worker maps and merging them at
            // the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host to perform the search, then
            // deserialize the returned locations into local buffers/anchors.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            // Neither local nor connected to a host; nothing to search.
            Task::ready(Ok(Default::default()))
        }
    }
3353
    /// Routes an LSP-backed request either to the buffer's local language
    /// server or, for remote projects, to the host over RPC.
    ///
    /// Resolves to a default (empty) response when the buffer has no local
    /// file/language server, when the server lacks the required capability,
    /// or when the project is neither local nor remote.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Bail out early if the server doesn't advertise support
                    // for this kind of request.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the request to the host and translate
            // its proto response back into the command's response type.
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3396
3397 pub fn find_or_create_local_worktree(
3398 &mut self,
3399 abs_path: impl AsRef<Path>,
3400 visible: bool,
3401 cx: &mut ModelContext<Self>,
3402 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3403 let abs_path = abs_path.as_ref();
3404 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3405 Task::ready(Ok((tree.clone(), relative_path.into())))
3406 } else {
3407 let worktree = self.create_local_worktree(abs_path, visible, cx);
3408 cx.foreground()
3409 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3410 }
3411 }
3412
3413 pub fn find_local_worktree(
3414 &self,
3415 abs_path: &Path,
3416 cx: &AppContext,
3417 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3418 for tree in self.worktrees(cx) {
3419 if let Some(relative_path) = tree
3420 .read(cx)
3421 .as_local()
3422 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3423 {
3424 return Some((tree.clone(), relative_path.into()));
3425 }
3426 }
3427 None
3428 }
3429
3430 pub fn is_shared(&self) -> bool {
3431 match &self.client_state {
3432 ProjectClientState::Local { is_shared, .. } => *is_shared,
3433 ProjectClientState::Remote { .. } => false,
3434 }
3435 }
3436
    /// Begins loading a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path share a single loading task via
    /// `loading_local_worktrees`. Once loaded, the worktree is added to the
    /// project; if the project has a remote id, the worktree is shared (when
    /// the project is shared) or registered with the host.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Whether or not loading succeeded, this path is no
                        // longer pending.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        // Make the new worktree visible to the remote peer(s)
                        // according to the project's current sharing state.
                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // The future is shared between concurrent callers, so the
                    // error must be cloneable: wrap it in an Arc.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            // Convert the shared Arc-wrapped error back into a plain error
            // for this particular caller.
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3503
3504 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3505 self.worktrees.retain(|worktree| {
3506 worktree
3507 .upgrade(cx)
3508 .map_or(false, |w| w.read(cx).id() != id)
3509 });
3510 cx.notify();
3511 }
3512
3513 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3514 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3515 if worktree.read(cx).is_local() {
3516 cx.subscribe(&worktree, |this, worktree, _, cx| {
3517 this.update_local_worktree_buffers(worktree, cx);
3518 })
3519 .detach();
3520 }
3521
3522 let push_strong_handle = {
3523 let worktree = worktree.read(cx);
3524 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3525 };
3526 if push_strong_handle {
3527 self.worktrees
3528 .push(WorktreeHandle::Strong(worktree.clone()));
3529 } else {
3530 cx.observe_release(&worktree, |this, _, cx| {
3531 this.worktrees
3532 .retain(|worktree| worktree.upgrade(cx).is_some());
3533 cx.notify();
3534 })
3535 .detach();
3536 self.worktrees
3537 .push(WorktreeHandle::Weak(worktree.downgrade()));
3538 }
3539 cx.notify();
3540 }
3541
    /// Reconciles all open buffers with a local worktree's latest snapshot
    /// after filesystem changes.
    ///
    /// Each buffer's `File` is refreshed — matched by entry id first, then by
    /// path, falling back to a placeholder with no entry id when the file is
    /// gone — peers are notified of the change, and buffers whose absolute
    /// path changed are re-registered with their language server. Dropped
    /// buffer handles are pruned from `opened_buffers`.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only reconcile buffers belonging to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            // Entry still exists (possibly under a new path).
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            // Old entry is gone, but a new entry occupies the
                            // same path.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // File was deleted: keep the old path and mtime,
                            // but clear the entry id.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        // Propagate the file change to collaborators.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A renamed buffer must be detached from its language server under
        // the old path and re-registered under the new one.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
3621
3622 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3623 let new_active_entry = entry.and_then(|project_path| {
3624 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3625 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3626 Some(entry.id)
3627 });
3628 if new_active_entry != self.active_entry {
3629 self.active_entry = new_active_entry;
3630 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3631 }
3632 }
3633
3634 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3635 self.language_server_statuses
3636 .values()
3637 .any(|status| status.pending_diagnostic_updates > 0)
3638 }
3639
3640 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3641 let mut summary = DiagnosticSummary::default();
3642 for (_, path_summary) in self.diagnostic_summaries(cx) {
3643 summary.error_count += path_summary.error_count;
3644 summary.warning_count += path_summary.warning_count;
3645 }
3646 summary
3647 }
3648
    /// Iterates over `(path, summary)` pairs of diagnostic counts for every
    /// path with diagnostics in every worktree of the project.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
3661
3662 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3663 if self
3664 .language_server_statuses
3665 .values()
3666 .map(|status| status.pending_diagnostic_updates)
3667 .sum::<isize>()
3668 == 1
3669 {
3670 cx.emit(Event::DiskBasedDiagnosticsStarted);
3671 }
3672 }
3673
3674 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3675 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3676 if self
3677 .language_server_statuses
3678 .values()
3679 .map(|status| status.pending_diagnostic_updates)
3680 .sum::<isize>()
3681 == 0
3682 {
3683 cx.emit(Event::DiskBasedDiagnosticsFinished);
3684 }
3685 }
3686
    /// The project entry most recently marked active via `set_active_path`,
    /// if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3690
3691 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3692 self.worktree_for_id(path.worktree_id, cx)?
3693 .read(cx)
3694 .entry_for_path(&path.path)
3695 .map(|entry| entry.id)
3696 }
3697
3698 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3699 let worktree = self.worktree_for_entry(entry_id, cx)?;
3700 let worktree = worktree.read(cx);
3701 let worktree_id = worktree.id();
3702 let path = worktree.entry_for_id(entry_id)?.path.clone();
3703 Some(ProjectPath { worktree_id, path })
3704 }
3705
3706 // RPC message handlers
3707
    /// The host stopped sharing the project: tear down this guest's
    /// collaboration state.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3717
3718 async fn handle_add_collaborator(
3719 this: ModelHandle<Self>,
3720 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3721 _: Arc<Client>,
3722 mut cx: AsyncAppContext,
3723 ) -> Result<()> {
3724 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3725 let collaborator = envelope
3726 .payload
3727 .collaborator
3728 .take()
3729 .ok_or_else(|| anyhow!("empty collaborator"))?;
3730
3731 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3732 this.update(&mut cx, |this, cx| {
3733 this.collaborators
3734 .insert(collaborator.peer_id, collaborator);
3735 cx.notify();
3736 });
3737
3738 Ok(())
3739 }
3740
3741 async fn handle_remove_collaborator(
3742 this: ModelHandle<Self>,
3743 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3744 _: Arc<Client>,
3745 mut cx: AsyncAppContext,
3746 ) -> Result<()> {
3747 this.update(&mut cx, |this, cx| {
3748 let peer_id = PeerId(envelope.payload.peer_id);
3749 let replica_id = this
3750 .collaborators
3751 .remove(&peer_id)
3752 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3753 .replica_id;
3754 for (_, buffer) in &this.opened_buffers {
3755 if let Some(buffer) = buffer.upgrade(cx) {
3756 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3757 }
3758 }
3759 cx.emit(Event::CollaboratorLeft(peer_id));
3760 cx.notify();
3761 Ok(())
3762 })
3763 }
3764
    /// The host added a worktree to the shared project: create a matching
    /// remote worktree that will be populated by subsequent update messages.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Start with an empty worktree; entries and diagnostic summaries
            // arrive later via UpdateWorktree/UpdateDiagnosticSummary.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
3788
3789 async fn handle_unregister_worktree(
3790 this: ModelHandle<Self>,
3791 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3792 _: Arc<Client>,
3793 mut cx: AsyncAppContext,
3794 ) -> Result<()> {
3795 this.update(&mut cx, |this, cx| {
3796 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3797 this.remove_worktree(worktree_id, cx);
3798 Ok(())
3799 })
3800 }
3801
3802 async fn handle_update_worktree(
3803 this: ModelHandle<Self>,
3804 envelope: TypedEnvelope<proto::UpdateWorktree>,
3805 _: Arc<Client>,
3806 mut cx: AsyncAppContext,
3807 ) -> Result<()> {
3808 this.update(&mut cx, |this, cx| {
3809 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3810 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3811 worktree.update(cx, |worktree, _| {
3812 let worktree = worktree.as_remote_mut().unwrap();
3813 worktree.update_from_remote(envelope)
3814 })?;
3815 }
3816 Ok(())
3817 })
3818 }
3819
    /// A guest asked the host to create a file inside one of its worktrees.
    /// Directory creation is not implemented yet and will panic if requested.
    async fn handle_create_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry = this
            .update(&mut cx, |this, cx| {
                let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
                let worktree = this
                    .worktree_for_id(worktree_id, cx)
                    .ok_or_else(|| anyhow!("worktree not found"))?;
                worktree.update(cx, |worktree, cx| {
                    let worktree = worktree.as_local_mut().unwrap();
                    if envelope.payload.is_directory {
                        unimplemented!("can't yet create directories");
                    } else {
                        // The path travels over the wire as raw bytes
                        // (unix-only `OsStringExt` decoding).
                        let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                        anyhow::Ok(worktree.write_file(path, Default::default(), cx))
                    }
                })
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
        })
    }
3847
3848 async fn handle_rename_project_entry(
3849 this: ModelHandle<Self>,
3850 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3851 _: Arc<Client>,
3852 mut cx: AsyncAppContext,
3853 ) -> Result<proto::ProjectEntryResponse> {
3854 let entry = this
3855 .update(&mut cx, |this, cx| {
3856 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3857 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
3858 this.rename_entry(entry_id, new_path, cx)
3859 .ok_or_else(|| anyhow!("invalid entry"))
3860 })?
3861 .await?;
3862 Ok(proto::ProjectEntryResponse {
3863 entry: Some((&entry).into()),
3864 })
3865 }
3866
    /// The host sent updated diagnostic counts for a path: record them on
    /// the corresponding remote worktree and notify observers.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            // Silently ignore updates for worktrees we don't know about or
            // messages with no summary payload.
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
3893
3894 async fn handle_start_language_server(
3895 this: ModelHandle<Self>,
3896 envelope: TypedEnvelope<proto::StartLanguageServer>,
3897 _: Arc<Client>,
3898 mut cx: AsyncAppContext,
3899 ) -> Result<()> {
3900 let server = envelope
3901 .payload
3902 .server
3903 .ok_or_else(|| anyhow!("invalid server"))?;
3904 this.update(&mut cx, |this, cx| {
3905 this.language_server_statuses.insert(
3906 server.id as usize,
3907 LanguageServerStatus {
3908 name: server.name,
3909 pending_work: Default::default(),
3910 pending_diagnostic_updates: 0,
3911 },
3912 );
3913 cx.notify();
3914 });
3915 Ok(())
3916 }
3917
    /// Applies a language-server status update forwarded by the host —
    /// work-progress begin/report/end and disk-based diagnostic state
    /// transitions — to this guest's mirrored server status.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3966
3967 async fn handle_update_buffer(
3968 this: ModelHandle<Self>,
3969 envelope: TypedEnvelope<proto::UpdateBuffer>,
3970 _: Arc<Client>,
3971 mut cx: AsyncAppContext,
3972 ) -> Result<()> {
3973 this.update(&mut cx, |this, cx| {
3974 let payload = envelope.payload.clone();
3975 let buffer_id = payload.buffer_id;
3976 let ops = payload
3977 .operations
3978 .into_iter()
3979 .map(|op| language::proto::deserialize_operation(op))
3980 .collect::<Result<Vec<_>, _>>()?;
3981 match this.opened_buffers.entry(buffer_id) {
3982 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3983 OpenBuffer::Strong(buffer) => {
3984 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3985 }
3986 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3987 OpenBuffer::Weak(_) => {}
3988 },
3989 hash_map::Entry::Vacant(e) => {
3990 e.insert(OpenBuffer::Loading(ops));
3991 }
3992 }
3993 Ok(())
3994 })
3995 }
3996
3997 async fn handle_update_buffer_file(
3998 this: ModelHandle<Self>,
3999 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4000 _: Arc<Client>,
4001 mut cx: AsyncAppContext,
4002 ) -> Result<()> {
4003 this.update(&mut cx, |this, cx| {
4004 let payload = envelope.payload.clone();
4005 let buffer_id = payload.buffer_id;
4006 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4007 let worktree = this
4008 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4009 .ok_or_else(|| anyhow!("no such worktree"))?;
4010 let file = File::from_proto(file, worktree.clone(), cx)?;
4011 let buffer = this
4012 .opened_buffers
4013 .get_mut(&buffer_id)
4014 .and_then(|b| b.upgrade(cx))
4015 .ok_or_else(|| anyhow!("no such buffer"))?;
4016 buffer.update(cx, |buffer, cx| {
4017 buffer.file_updated(Box::new(file), cx).detach();
4018 });
4019 Ok(())
4020 })
4021 }
4022
    /// A guest asked the host to save a buffer: wait until the host's copy
    /// has caught up to the guest's requested version, save it, and report
    /// the saved version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until the edits the guest has seen have been applied
        // here too.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
4055
4056 async fn handle_reload_buffers(
4057 this: ModelHandle<Self>,
4058 envelope: TypedEnvelope<proto::ReloadBuffers>,
4059 _: Arc<Client>,
4060 mut cx: AsyncAppContext,
4061 ) -> Result<proto::ReloadBuffersResponse> {
4062 let sender_id = envelope.original_sender_id()?;
4063 let reload = this.update(&mut cx, |this, cx| {
4064 let mut buffers = HashSet::default();
4065 for buffer_id in &envelope.payload.buffer_ids {
4066 buffers.insert(
4067 this.opened_buffers
4068 .get(buffer_id)
4069 .and_then(|buffer| buffer.upgrade(cx))
4070 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4071 );
4072 }
4073 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4074 })?;
4075
4076 let project_transaction = reload.await?;
4077 let project_transaction = this.update(&mut cx, |this, cx| {
4078 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4079 });
4080 Ok(proto::ReloadBuffersResponse {
4081 transaction: Some(project_transaction),
4082 })
4083 }
4084
4085 async fn handle_format_buffers(
4086 this: ModelHandle<Self>,
4087 envelope: TypedEnvelope<proto::FormatBuffers>,
4088 _: Arc<Client>,
4089 mut cx: AsyncAppContext,
4090 ) -> Result<proto::FormatBuffersResponse> {
4091 let sender_id = envelope.original_sender_id()?;
4092 let format = this.update(&mut cx, |this, cx| {
4093 let mut buffers = HashSet::default();
4094 for buffer_id in &envelope.payload.buffer_ids {
4095 buffers.insert(
4096 this.opened_buffers
4097 .get(buffer_id)
4098 .and_then(|buffer| buffer.upgrade(cx))
4099 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4100 );
4101 }
4102 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4103 })?;
4104
4105 let project_transaction = format.await?;
4106 let project_transaction = this.update(&mut cx, |this, cx| {
4107 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4108 });
4109 Ok(proto::FormatBuffersResponse {
4110 transaction: Some(project_transaction),
4111 })
4112 }
4113
    /// Serves a guest's completion request: waits for the host's buffer to
    /// reach the guest's version, computes completions, and returns them
    /// along with the buffer version they were computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting so the response is stamped with
        // the state the completions were actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4148
    /// Applies a completion's additional edits on behalf of a guest and
    /// returns the resulting transaction (if the edits produced one).
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to reconstruct the
            // completion's label from its proto form.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4181
    /// Serves a guest's code-action request for a buffer range: waits for
    /// the host's buffer to reach the guest's version, computes the actions,
    /// and returns them with the version they were computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Re-read the version after waiting so the response reflects the
        // state the actions were actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
4224
    /// Applies a code action to a host buffer on behalf of a guest and
    /// returns the resulting transaction, serialized for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
4255
    /// Generic handler for LSP-backed requests forwarded by guests.
    ///
    /// Deserializes the request (which may itself wait for buffer state),
    /// runs it through `request_lsp` against the host's language server, and
    /// serializes the response stamped with the buffer version it was
    /// computed against.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before running the request so the response can
        // be interpreted by the guest relative to that state.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
4296
4297 async fn handle_get_project_symbols(
4298 this: ModelHandle<Self>,
4299 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4300 _: Arc<Client>,
4301 mut cx: AsyncAppContext,
4302 ) -> Result<proto::GetProjectSymbolsResponse> {
4303 let symbols = this
4304 .update(&mut cx, |this, cx| {
4305 this.symbols(&envelope.payload.query, cx)
4306 })
4307 .await?;
4308
4309 Ok(proto::GetProjectSymbolsResponse {
4310 symbols: symbols.iter().map(serialize_symbol).collect(),
4311 })
4312 }
4313
4314 async fn handle_search_project(
4315 this: ModelHandle<Self>,
4316 envelope: TypedEnvelope<proto::SearchProject>,
4317 _: Arc<Client>,
4318 mut cx: AsyncAppContext,
4319 ) -> Result<proto::SearchProjectResponse> {
4320 let peer_id = envelope.original_sender_id()?;
4321 let query = SearchQuery::from_proto(envelope.payload)?;
4322 let result = this
4323 .update(&mut cx, |this, cx| this.search(query, cx))
4324 .await?;
4325
4326 this.update(&mut cx, |this, cx| {
4327 let mut locations = Vec::new();
4328 for (buffer, ranges) in result {
4329 for range in ranges {
4330 let start = serialize_anchor(&range.start);
4331 let end = serialize_anchor(&range.end);
4332 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4333 locations.push(proto::Location {
4334 buffer: Some(buffer),
4335 start: Some(start),
4336 end: Some(end),
4337 });
4338 }
4339 }
4340 Ok(proto::SearchProjectResponse { locations })
4341 })
4342 }
4343
    /// Handles a guest's request to open the buffer that contains a project
    /// symbol, verifying the symbol's signature before touching any file.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            // The signature mixes in this project's private nonce (see
            // `symbol_signature`), so a mismatch means the peer is referencing
            // a (worktree, path) pair it was never handed — reject it.
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4374
4375 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4376 let mut hasher = Sha256::new();
4377 hasher.update(worktree_id.to_proto().to_be_bytes());
4378 hasher.update(path.to_string_lossy().as_bytes());
4379 hasher.update(self.nonce.to_be_bytes());
4380 hasher.finalize().as_slice().try_into().unwrap()
4381 }
4382
4383 async fn handle_open_buffer_by_id(
4384 this: ModelHandle<Self>,
4385 envelope: TypedEnvelope<proto::OpenBufferById>,
4386 _: Arc<Client>,
4387 mut cx: AsyncAppContext,
4388 ) -> Result<proto::OpenBufferResponse> {
4389 let peer_id = envelope.original_sender_id()?;
4390 let buffer = this
4391 .update(&mut cx, |this, cx| {
4392 this.open_buffer_by_id(envelope.payload.id, cx)
4393 })
4394 .await?;
4395 this.update(&mut cx, |this, cx| {
4396 Ok(proto::OpenBufferResponse {
4397 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4398 })
4399 })
4400 }
4401
4402 async fn handle_open_buffer_by_path(
4403 this: ModelHandle<Self>,
4404 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4405 _: Arc<Client>,
4406 mut cx: AsyncAppContext,
4407 ) -> Result<proto::OpenBufferResponse> {
4408 let peer_id = envelope.original_sender_id()?;
4409 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4410 let open_buffer = this.update(&mut cx, |this, cx| {
4411 this.open_buffer(
4412 ProjectPath {
4413 worktree_id,
4414 path: PathBuf::from(envelope.payload.path).into(),
4415 },
4416 cx,
4417 )
4418 });
4419
4420 let buffer = open_buffer.await?;
4421 this.update(&mut cx, |this, cx| {
4422 Ok(proto::OpenBufferResponse {
4423 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4424 })
4425 })
4426 }
4427
4428 fn serialize_project_transaction_for_peer(
4429 &mut self,
4430 project_transaction: ProjectTransaction,
4431 peer_id: PeerId,
4432 cx: &AppContext,
4433 ) -> proto::ProjectTransaction {
4434 let mut serialized_transaction = proto::ProjectTransaction {
4435 buffers: Default::default(),
4436 transactions: Default::default(),
4437 };
4438 for (buffer, transaction) in project_transaction.0 {
4439 serialized_transaction
4440 .buffers
4441 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4442 serialized_transaction
4443 .transactions
4444 .push(language::proto::serialize_transaction(&transaction));
4445 }
4446 serialized_transaction
4447 }
4448
    /// Reconstructs a `ProjectTransaction` received from a peer, waiting for
    /// all referenced buffers and their edits to arrive before returning.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits composing each transaction may still be in flight;
                // wait until they have all been applied to the buffer.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4482
4483 fn serialize_buffer_for_peer(
4484 &mut self,
4485 buffer: &ModelHandle<Buffer>,
4486 peer_id: PeerId,
4487 cx: &AppContext,
4488 ) -> proto::Buffer {
4489 let buffer_id = buffer.read(cx).remote_id();
4490 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4491 if shared_buffers.insert(buffer_id) {
4492 proto::Buffer {
4493 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4494 }
4495 } else {
4496 proto::Buffer {
4497 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4498 }
4499 }
4500 }
4501
    /// Materializes a `proto::Buffer` into a local buffer model: either looks
    /// up (or waits for) an already-registered buffer by id, or constructs a
    /// new replica from a full serialized state.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Only an id was sent, so the full state must arrive via
                    // another message; poll `opened_buffers` until it does.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        // Park until the next "a buffer was opened" signal.
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // Resolve the buffer's file against a local worktree, if
                    // the serialized state carries one.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks blocked in the `Variant::Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4560
    /// Reconstructs a `Symbol` from its wire representation, recomputing its
    /// display label from the file's language when one is available.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting an untrusted wire integer into an LSP
        // symbol-kind value is unsound if a peer sends an out-of-range
        // discriminant — a checked conversion would be safer. TODO confirm
        // `lsp::SymbolKind`'s representation before changing.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain, unstyled label when no language matches.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4590
4591 async fn handle_buffer_saved(
4592 this: ModelHandle<Self>,
4593 envelope: TypedEnvelope<proto::BufferSaved>,
4594 _: Arc<Client>,
4595 mut cx: AsyncAppContext,
4596 ) -> Result<()> {
4597 let version = deserialize_version(envelope.payload.version);
4598 let mtime = envelope
4599 .payload
4600 .mtime
4601 .ok_or_else(|| anyhow!("missing mtime"))?
4602 .into();
4603
4604 this.update(&mut cx, |this, cx| {
4605 let buffer = this
4606 .opened_buffers
4607 .get(&envelope.payload.buffer_id)
4608 .and_then(|buffer| buffer.upgrade(cx));
4609 if let Some(buffer) = buffer {
4610 buffer.update(cx, |buffer, cx| {
4611 buffer.did_save(version, mtime, None, cx);
4612 });
4613 }
4614 Ok(())
4615 })
4616 }
4617
4618 async fn handle_buffer_reloaded(
4619 this: ModelHandle<Self>,
4620 envelope: TypedEnvelope<proto::BufferReloaded>,
4621 _: Arc<Client>,
4622 mut cx: AsyncAppContext,
4623 ) -> Result<()> {
4624 let payload = envelope.payload.clone();
4625 let version = deserialize_version(payload.version);
4626 let mtime = payload
4627 .mtime
4628 .ok_or_else(|| anyhow!("missing mtime"))?
4629 .into();
4630 this.update(&mut cx, |this, cx| {
4631 let buffer = this
4632 .opened_buffers
4633 .get(&payload.buffer_id)
4634 .and_then(|buffer| buffer.upgrade(cx));
4635 if let Some(buffer) = buffer {
4636 buffer.update(cx, |buffer, cx| {
4637 buffer.did_reload(version, mtime, cx);
4638 });
4639 }
4640 Ok(())
4641 })
4642 }
4643
4644 pub fn match_paths<'a>(
4645 &self,
4646 query: &'a str,
4647 include_ignored: bool,
4648 smart_case: bool,
4649 max_results: usize,
4650 cancel_flag: &'a AtomicBool,
4651 cx: &AppContext,
4652 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4653 let worktrees = self
4654 .worktrees(cx)
4655 .filter(|worktree| worktree.read(cx).is_visible())
4656 .collect::<Vec<_>>();
4657 let include_root_name = worktrees.len() > 1;
4658 let candidate_sets = worktrees
4659 .into_iter()
4660 .map(|worktree| CandidateSet {
4661 snapshot: worktree.read(cx).snapshot(),
4662 include_ignored,
4663 include_root_name,
4664 })
4665 .collect::<Vec<_>>();
4666
4667 let background = cx.background().clone();
4668 async move {
4669 fuzzy::match_paths(
4670 candidate_sets.as_slice(),
4671 query,
4672 smart_case,
4673 max_results,
4674 cancel_flag,
4675 background,
4676 )
4677 .await
4678 }
4679 }
4680
    /// Converts a batch of LSP `TextEdit`s into anchor-ranged edits against
    /// the buffer snapshot corresponding to `version` (or the current
    /// snapshot when `version` is `None`). Runs on the background executor.
    ///
    /// Adjacent or newline-separated LSP edits are merged, and multiline
    /// edits are re-diffed against the old text so that anchors in unchanged
    /// regions keep their positions.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging unless the gap is exactly one newline:
                        // the next edit must start at column 0 of the line
                        // after `range.end`, and `range.end` must be at the
                        // end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't correspond to valid
                // positions in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the cursor has advanced since the last
                    // emitted edit; consecutive delete/insert hunks are merged
                    // into a single replacement edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4783
4784 fn buffer_snapshot_for_lsp_version(
4785 &mut self,
4786 buffer: &ModelHandle<Buffer>,
4787 version: Option<i32>,
4788 cx: &AppContext,
4789 ) -> Result<TextBufferSnapshot> {
4790 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4791
4792 if let Some(version) = version {
4793 let buffer_id = buffer.read(cx).remote_id();
4794 let snapshots = self
4795 .buffer_snapshots
4796 .get_mut(&buffer_id)
4797 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4798 let mut found_snapshot = None;
4799 snapshots.retain(|(snapshot_version, snapshot)| {
4800 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4801 false
4802 } else {
4803 if *snapshot_version == version {
4804 found_snapshot = Some(snapshot.clone());
4805 }
4806 true
4807 }
4808 });
4809
4810 found_snapshot.ok_or_else(|| {
4811 anyhow!(
4812 "snapshot not found for buffer {} at version {}",
4813 buffer_id,
4814 version
4815 )
4816 })
4817 } else {
4818 Ok((buffer.read(cx)).text_snapshot())
4819 }
4820 }
4821
4822 fn language_server_for_buffer(
4823 &self,
4824 buffer: &Buffer,
4825 cx: &AppContext,
4826 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4827 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4828 let worktree_id = file.worktree_id(cx);
4829 self.language_servers
4830 .get(&(worktree_id, language.lsp_adapter()?.name()))
4831 } else {
4832 None
4833 }
4834 }
4835}
4836
4837impl WorktreeHandle {
4838 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4839 match self {
4840 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4841 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4842 }
4843 }
4844}
4845
4846impl OpenBuffer {
4847 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4848 match self {
4849 OpenBuffer::Strong(handle) => Some(handle.clone()),
4850 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4851 OpenBuffer::Loading(_) => None,
4852 }
4853 }
4854}
4855
/// A single worktree's contribution to a fuzzy path search: a snapshot of its
/// file list plus flags controlling which entries are eligible.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files should be offered as candidates too.
    include_ignored: bool,
    // Whether matches should be prefixed with the worktree's root name
    // (used when multiple worktrees are open, to disambiguate).
    include_root_name: bool,
}
4861
4862impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4863 type Candidates = CandidateSetIter<'a>;
4864
4865 fn id(&self) -> usize {
4866 self.snapshot.id().to_usize()
4867 }
4868
4869 fn len(&self) -> usize {
4870 if self.include_ignored {
4871 self.snapshot.file_count()
4872 } else {
4873 self.snapshot.visible_file_count()
4874 }
4875 }
4876
4877 fn prefix(&self) -> Arc<str> {
4878 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4879 self.snapshot.root_name().into()
4880 } else if self.include_root_name {
4881 format!("{}/", self.snapshot.root_name()).into()
4882 } else {
4883 "".into()
4884 }
4885 }
4886
4887 fn candidates(&'a self, start: usize) -> Self::Candidates {
4888 CandidateSetIter {
4889 traversal: self.snapshot.files(self.include_ignored, start),
4890 }
4891 }
4892}
4893
/// Iterator adapter that walks a worktree traversal and yields a fuzzy-match
/// candidate for each file entry.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4897
4898impl<'a> Iterator for CandidateSetIter<'a> {
4899 type Item = PathMatchCandidate<'a>;
4900
4901 fn next(&mut self) -> Option<Self::Item> {
4902 self.traversal.next().map(|entry| {
4903 if let EntryKind::File(char_bag) = entry.kind {
4904 PathMatchCandidate {
4905 path: &entry.path,
4906 char_bag,
4907 }
4908 } else {
4909 unreachable!()
4910 }
4911 })
4912 }
4913}
4914
impl Entity for Project {
    type Event = Event;

    // Called when the last handle to the project is dropped: notify the
    // server that this project is going away.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared (i.e. it
                // has been assigned a remote id).
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    // Gracefully shuts down all running language servers before the app
    // exits, waiting for every shutdown to complete.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, (_, server))| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4954
4955impl Collaborator {
4956 fn from_proto(
4957 message: proto::Collaborator,
4958 user_store: &ModelHandle<UserStore>,
4959 cx: &mut AsyncAppContext,
4960 ) -> impl Future<Output = Result<Self>> {
4961 let user = user_store.update(cx, |user_store, cx| {
4962 user_store.fetch_user(message.user_id, cx)
4963 });
4964
4965 async move {
4966 Ok(Self {
4967 peer_id: PeerId(message.peer_id),
4968 user: user.await?,
4969 replica_id: message.replica_id as ReplicaId,
4970 })
4971 }
4972 }
4973}
4974
4975impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4976 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4977 Self {
4978 worktree_id,
4979 path: path.as_ref().into(),
4980 }
4981 }
4982}
4983
4984impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4985 fn from(options: lsp::CreateFileOptions) -> Self {
4986 Self {
4987 overwrite: options.overwrite.unwrap_or(false),
4988 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4989 }
4990 }
4991}
4992
4993impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4994 fn from(options: lsp::RenameFileOptions) -> Self {
4995 Self {
4996 overwrite: options.overwrite.unwrap_or(false),
4997 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4998 }
4999 }
5000}
5001
5002impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5003 fn from(options: lsp::DeleteFileOptions) -> Self {
5004 Self {
5005 recursive: options.recursive.unwrap_or(false),
5006 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5007 }
5008 }
5009}
5010
/// Converts a `Symbol` into its wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // NOTE(review): transmuting the LSP symbol kind into the wire integer
        // assumes the two types share a layout — an explicit conversion would
        // be safer. TODO confirm `lsp::SymbolKind`'s representation.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5030
/// Computes `path` expressed relative to `base`, using `..` components to
/// climb out of `base` where the two diverge.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both exhausted: the paths were identical; result stays empty.
            (None, None) => break,
            // Base exhausted: the remainder of `path` is the answer.
            (Some(component), None) => {
                result.push(component);
                result.extend(target.by_ref());
                break;
            }
            // Path exhausted: climb up one level per remaining base component.
            (None, _) => result.push(Component::ParentDir),
            // Still inside the shared prefix: skip matching components.
            (Some(a), Some(b)) if result.is_empty() && a == b => (),
            // A `.` in base consumes nothing; keep the path component.
            (Some(a), Some(b)) if b == Component::CurDir => result.push(a),
            // Paths diverge: climb out of the rest of base, then descend.
            (Some(a), Some(_)) => {
                result.push(Component::ParentDir);
                for _ in anchor.by_ref() {
                    result.push(Component::ParentDir);
                }
                result.push(a);
                result.extend(target.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
5059
impl Item for Buffer {
    /// Returns the project entry backing this buffer, if the buffer belongs
    /// to a worktree file that currently exists in the project.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
5065
5066#[cfg(test)]
5067mod tests {
5068 use crate::worktree::WorktreeHandle;
5069
5070 use super::{Event, *};
5071 use fs::RealFs;
5072 use futures::{future, StreamExt};
5073 use gpui::test::subscribe;
5074 use language::{
5075 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5076 OffsetRangeExt, Point, ToPoint,
5077 };
5078 use lsp::Url;
5079 use serde_json::json;
5080 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5081 use unindent::Unindent as _;
5082 use util::{assert_set_eq, test::temp_tree};
5083
    // Verifies that a project built over a symlinked root populates its
    // worktree (following symlinks) and that fuzzy path matching finds the
    // expected files.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the project through a symlink to the root, and add a symlinked
        // directory inside the tree as well.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying files.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // "bna" fuzzy-matches only paths under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
5137
5138 #[gpui::test]
5139 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5140 cx.foreground().forbid_parking();
5141
5142 let mut rust_language = Language::new(
5143 LanguageConfig {
5144 name: "Rust".into(),
5145 path_suffixes: vec!["rs".to_string()],
5146 ..Default::default()
5147 },
5148 Some(tree_sitter_rust::language()),
5149 );
5150 let mut json_language = Language::new(
5151 LanguageConfig {
5152 name: "JSON".into(),
5153 path_suffixes: vec!["json".to_string()],
5154 ..Default::default()
5155 },
5156 None,
5157 );
5158 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5159 name: "the-rust-language-server",
5160 capabilities: lsp::ServerCapabilities {
5161 completion_provider: Some(lsp::CompletionOptions {
5162 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5163 ..Default::default()
5164 }),
5165 ..Default::default()
5166 },
5167 ..Default::default()
5168 });
5169 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5170 name: "the-json-language-server",
5171 capabilities: lsp::ServerCapabilities {
5172 completion_provider: Some(lsp::CompletionOptions {
5173 trigger_characters: Some(vec![":".to_string()]),
5174 ..Default::default()
5175 }),
5176 ..Default::default()
5177 },
5178 ..Default::default()
5179 });
5180
5181 let fs = FakeFs::new(cx.background());
5182 fs.insert_tree(
5183 "/the-root",
5184 json!({
5185 "test.rs": "const A: i32 = 1;",
5186 "test2.rs": "",
5187 "Cargo.toml": "a = 1",
5188 "package.json": "{\"a\": 1}",
5189 }),
5190 )
5191 .await;
5192
5193 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5194 project.update(cx, |project, _| {
5195 project.languages.add(Arc::new(rust_language));
5196 project.languages.add(Arc::new(json_language));
5197 });
5198
5199 // Open a buffer without an associated language server.
5200 let toml_buffer = project
5201 .update(cx, |project, cx| {
5202 project.open_local_buffer("/the-root/Cargo.toml", cx)
5203 })
5204 .await
5205 .unwrap();
5206
5207 // Open a buffer with an associated language server.
5208 let rust_buffer = project
5209 .update(cx, |project, cx| {
5210 project.open_local_buffer("/the-root/test.rs", cx)
5211 })
5212 .await
5213 .unwrap();
5214
5215 // A server is started up, and it is notified about Rust files.
5216 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5217 assert_eq!(
5218 fake_rust_server
5219 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5220 .await
5221 .text_document,
5222 lsp::TextDocumentItem {
5223 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5224 version: 0,
5225 text: "const A: i32 = 1;".to_string(),
5226 language_id: Default::default()
5227 }
5228 );
5229
5230 // The buffer is configured based on the language server's capabilities.
5231 rust_buffer.read_with(cx, |buffer, _| {
5232 assert_eq!(
5233 buffer.completion_triggers(),
5234 &[".".to_string(), "::".to_string()]
5235 );
5236 });
5237 toml_buffer.read_with(cx, |buffer, _| {
5238 assert!(buffer.completion_triggers().is_empty());
5239 });
5240
5241 // Edit a buffer. The changes are reported to the language server.
5242 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5243 assert_eq!(
5244 fake_rust_server
5245 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5246 .await
5247 .text_document,
5248 lsp::VersionedTextDocumentIdentifier::new(
5249 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5250 1
5251 )
5252 );
5253
5254 // Open a third buffer with a different associated language server.
5255 let json_buffer = project
5256 .update(cx, |project, cx| {
5257 project.open_local_buffer("/the-root/package.json", cx)
5258 })
5259 .await
5260 .unwrap();
5261
5262 // A json language server is started up and is only notified about the json buffer.
5263 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5264 assert_eq!(
5265 fake_json_server
5266 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5267 .await
5268 .text_document,
5269 lsp::TextDocumentItem {
5270 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5271 version: 0,
5272 text: "{\"a\": 1}".to_string(),
5273 language_id: Default::default()
5274 }
5275 );
5276
5277 // This buffer is configured based on the second language server's
5278 // capabilities.
5279 json_buffer.read_with(cx, |buffer, _| {
5280 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5281 });
5282
5283 // When opening another buffer whose language server is already running,
5284 // it is also configured based on the existing language server's capabilities.
5285 let rust_buffer2 = project
5286 .update(cx, |project, cx| {
5287 project.open_local_buffer("/the-root/test2.rs", cx)
5288 })
5289 .await
5290 .unwrap();
5291 rust_buffer2.read_with(cx, |buffer, _| {
5292 assert_eq!(
5293 buffer.completion_triggers(),
5294 &[".".to_string(), "::".to_string()]
5295 );
5296 });
5297
5298 // Changes are reported only to servers matching the buffer's language.
5299 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5300 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5301 assert_eq!(
5302 fake_rust_server
5303 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5304 .await
5305 .text_document,
5306 lsp::VersionedTextDocumentIdentifier::new(
5307 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5308 1
5309 )
5310 );
5311
5312 // Save notifications are reported to all servers.
5313 toml_buffer
5314 .update(cx, |buffer, cx| buffer.save(cx))
5315 .await
5316 .unwrap();
5317 assert_eq!(
5318 fake_rust_server
5319 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5320 .await
5321 .text_document,
5322 lsp::TextDocumentIdentifier::new(
5323 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5324 )
5325 );
5326 assert_eq!(
5327 fake_json_server
5328 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5329 .await
5330 .text_document,
5331 lsp::TextDocumentIdentifier::new(
5332 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5333 )
5334 );
5335
5336 // Renames are reported only to servers matching the buffer's language.
5337 fs.rename(
5338 Path::new("/the-root/test2.rs"),
5339 Path::new("/the-root/test3.rs"),
5340 Default::default(),
5341 )
5342 .await
5343 .unwrap();
5344 assert_eq!(
5345 fake_rust_server
5346 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5347 .await
5348 .text_document,
5349 lsp::TextDocumentIdentifier::new(
5350 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5351 ),
5352 );
5353 assert_eq!(
5354 fake_rust_server
5355 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5356 .await
5357 .text_document,
5358 lsp::TextDocumentItem {
5359 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5360 version: 0,
5361 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5362 language_id: Default::default()
5363 },
5364 );
5365
5366 rust_buffer2.update(cx, |buffer, cx| {
5367 buffer.update_diagnostics(
5368 DiagnosticSet::from_sorted_entries(
5369 vec![DiagnosticEntry {
5370 diagnostic: Default::default(),
5371 range: Anchor::MIN..Anchor::MAX,
5372 }],
5373 &buffer.snapshot(),
5374 ),
5375 cx,
5376 );
5377 assert_eq!(
5378 buffer
5379 .snapshot()
5380 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5381 .count(),
5382 1
5383 );
5384 });
5385
5386 // When the rename changes the extension of the file, the buffer gets closed on the old
5387 // language server and gets opened on the new one.
5388 fs.rename(
5389 Path::new("/the-root/test3.rs"),
5390 Path::new("/the-root/test3.json"),
5391 Default::default(),
5392 )
5393 .await
5394 .unwrap();
5395 assert_eq!(
5396 fake_rust_server
5397 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5398 .await
5399 .text_document,
5400 lsp::TextDocumentIdentifier::new(
5401 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5402 ),
5403 );
5404 assert_eq!(
5405 fake_json_server
5406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5407 .await
5408 .text_document,
5409 lsp::TextDocumentItem {
5410 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5411 version: 0,
5412 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5413 language_id: Default::default()
5414 },
5415 );
5416
5417 // We clear the diagnostics, since the language has changed.
5418 rust_buffer2.read_with(cx, |buffer, _| {
5419 assert_eq!(
5420 buffer
5421 .snapshot()
5422 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5423 .count(),
5424 0
5425 );
5426 });
5427
5428 // The renamed file's version resets after changing language server.
5429 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5430 assert_eq!(
5431 fake_json_server
5432 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5433 .await
5434 .text_document,
5435 lsp::VersionedTextDocumentIdentifier::new(
5436 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5437 1
5438 )
5439 );
5440
5441 // Restart language servers
5442 project.update(cx, |project, cx| {
5443 project.restart_language_servers_for_buffers(
5444 vec![rust_buffer.clone(), json_buffer.clone()],
5445 cx,
5446 );
5447 });
5448
5449 let mut rust_shutdown_requests = fake_rust_server
5450 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5451 let mut json_shutdown_requests = fake_json_server
5452 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5453 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5454
5455 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5456 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5457
5458 // Ensure rust document is reopened in new rust language server
5459 assert_eq!(
5460 fake_rust_server
5461 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5462 .await
5463 .text_document,
5464 lsp::TextDocumentItem {
5465 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5466 version: 1,
5467 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5468 language_id: Default::default()
5469 }
5470 );
5471
5472 // Ensure json documents are reopened in new json language server
5473 assert_set_eq!(
5474 [
5475 fake_json_server
5476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5477 .await
5478 .text_document,
5479 fake_json_server
5480 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5481 .await
5482 .text_document,
5483 ],
5484 [
5485 lsp::TextDocumentItem {
5486 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5487 version: 0,
5488 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5489 language_id: Default::default()
5490 },
5491 lsp::TextDocumentItem {
5492 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5493 version: 1,
5494 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5495 language_id: Default::default()
5496 }
5497 ]
5498 );
5499
5500 // Close notifications are reported only to servers matching the buffer's language.
5501 cx.update(|_| drop(json_buffer));
5502 let close_message = lsp::DidCloseTextDocumentParams {
5503 text_document: lsp::TextDocumentIdentifier::new(
5504 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5505 ),
5506 };
5507 assert_eq!(
5508 fake_json_server
5509 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5510 .await,
5511 close_message,
5512 );
5513 }
5514
// Diagnostics published via LSP `PublishDiagnostics` must be routed to the
// correct buffer when each file lives in its own single-file worktree:
// a.rs gets only its ERROR, b.rs gets only its WARNING.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Passing two file paths (not a directory) creates two single-file worktrees.
    let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR for a.rs, a WARNING for b.rs.
    // NOTE(review): the `&[]` argument appears to be the disk-based diagnostic
    // sources list — confirm against `update_diagnostics`' signature.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer renders only the diagnostic that targets its own file.
    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
5610
// Exercises the disk-based-diagnostics lifecycle events driven by the
// server's progress token: `DiskBasedDiagnosticsStarted` fires on the first
// start, `Updated`/`Finished` fire only once every nested start has been
// matched by an end, and republishing identical empty diagnostics does not
// emit a redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id =
        project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    // The first progress start emits the "started" event.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted
    );

    // Nest the token: two more starts and one end leave it still active,
    // so no "updated"/"finished" events should be emitted yet.
    fake_server.start_progress(progress_token).await;
    fake_server.end_progress(progress_token).await;
    fake_server.start_progress(progress_token).await;

    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: vec![lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                ..Default::default()
            }],
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
    );

    // Matching the two outstanding starts with two ends completes the
    // progress and emits "updated" followed by "finished".
    fake_server.end_progress(progress_token).await;
    fake_server.end_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsUpdated
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished
    );

    // Opening the diagnosed file shows the published diagnostic as a
    // primary entry in its own group.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
5740
// Restarting a language server while its disk-based diagnostics are still
// in progress must not leave the project stuck in the "diagnosing" state:
// the old server's unfinished progress token is abandoned, and completion
// is tracked against the newly started server only.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    // (Shadows the old server handle — the first server is never ended.)
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted
    );

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsUpdated
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished
    );
    project.read_with(cx, |project, _| {
        assert!(!project.is_running_disk_based_diagnostics());
    });
}
5803
// Diagnostics are published against a specific LSP document version; this
// test verifies that the project translates their ranges through all buffer
// edits made since that version, handles overlapping diagnostics, and
// accepts out-of-order (unsorted) diagnostic lists.
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the versions the server observed, so diagnostics can be
    // pinned to stale snapshots below.
    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    // (The "\n\n" insertion shifted every line by two.)
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
6086
// Diagnostics with empty (zero-width) ranges must still be visible when
// rendering: the range is widened to cover one adjacent character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir"], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly: one mid-line (before the
    // ";" on line 0) and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
6154
// `edits_from_lsp` with a stale document version: edits the language server
// computed against an old snapshot must be transformed through all edits
// made since that version, so applying them to the *current* buffer yields
// the intended result without clobbering the user's intervening changes.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the version the server saw when the document was opened;
    // the server's edits below are expressed against this snapshot.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The edit coordinates below refer to the *old* (versioned) snapshot.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits to the current buffer must merge the
    // server's changes with the user's interleaved edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
6320
// `edits_from_lsp` with no version: a huge diff that rewrites most of the
// file should be minimized down to the small set of edits that actually
// change text, instead of being applied verbatim.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir"], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The minimized edit set: only the import rewrite and the deletion
        // of the now-redundant second `use` line survive.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
6439
6440 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6441 buffer: &Buffer,
6442 range: Range<T>,
6443 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6444 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6445 for chunk in buffer.snapshot().chunks(range, true) {
6446 if chunks.last().map_or(false, |prev_chunk| {
6447 prev_chunk.1 == chunk.diagnostic_severity
6448 }) {
6449 chunks.last_mut().unwrap().0.push_str(chunk.text);
6450 } else {
6451 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6452 }
6453 }
6454 chunks
6455 }
6456
6457 #[gpui::test]
6458 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6459 let dir = temp_tree(json!({
6460 "root": {
6461 "dir1": {},
6462 "dir2": {
6463 "dir3": {}
6464 }
6465 }
6466 }));
6467
6468 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6469 let cancel_flag = Default::default();
6470 let results = project
6471 .read_with(cx, |project, cx| {
6472 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6473 })
6474 .await;
6475
6476 assert!(results.is_empty());
6477 }
6478
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        // Go-to-definition whose target lies outside the project's worktree:
        // the target file should be opened via a new, non-visible worktree
        // that is released once the last handle to the definition is dropped.
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        // Only b.rs belongs to the project; a.rs is not in any worktree yet.
        let project = Project::test(fs, ["/dir/b.rs"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let buffer = project
            .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
            .await
            .unwrap();

        // Script the fake server: a GotoDefinition request at offset 22 of
        // b.rs resolves to a location inside a.rs.
        let fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                lsp::Location::new(
                    lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                    lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                ),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // a.rs was opened through a second worktree that is not visible.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // After dropping the definition, the invisible worktree for a.rs is
        // released and only the original worktree remains.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Returns each worktree's absolute path paired with its visibility.
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
6573
    #[gpui::test]
    async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
        // Completion items that provide only `insert_text` (no `textEdit`
        // range): the editor must derive the replacement range itself, here
        // the word "fqn" preceding the cursor.
        let mut language = Language::new(
            LanguageConfig {
                name: "TypeScript".into(),
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_typescript::language_typescript()),
        );
        let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.ts": "",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
            .await
            .unwrap();

        let fake_server = fake_language_servers.next().await.unwrap();

        // Request completions at the end of "let a = b.fqn".
        let text = "let a = b.fqn";
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), cx)
        });

        // Respond with a single item whose label differs from its
        // `insert_text` and which carries no explicit edit range.
        fake_server
            .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
                Ok(Some(lsp::CompletionResponse::Array(vec![
                    lsp::CompletionItem {
                        label: "fullyQualifiedName?".into(),
                        insert_text: Some("fullyQualifiedName".into()),
                        ..Default::default()
                    },
                ])))
            })
            .next()
            .await;
        let completions = completions.await.unwrap();
        let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
        assert_eq!(completions.len(), 1);
        // The inserted text comes from `insert_text`, not the label, and the
        // replaced range covers the three characters of "fqn".
        assert_eq!(completions[0].new_text, "fullyQualifiedName");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
6631
    #[gpui::test(iterations = 10)]
    async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
        // Code actions that carry a command instead of edits: applying one
        // must resolve the action, execute its command, and fold any
        // `workspace/applyEdit` requests the server makes during execution
        // into the returned project transaction.
        let mut language = Language::new(
            LanguageConfig {
                name: "TypeScript".into(),
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.ts": "a",
            }),
        )
        .await;

        let project = Project::test(fs, ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
            .await
            .unwrap();

        let fake_server = fake_language_servers.next().await.unwrap();

        // Language server returns code actions that contain commands, and not edits.
        let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
        fake_server
            .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                Ok(Some(vec![
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "The code action".into(),
                        command: Some(lsp::Command {
                            title: "The command".into(),
                            command: "_the/command".into(),
                            arguments: Some(vec![json!("the-argument")]),
                        }),
                        ..Default::default()
                    }),
                    lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                        title: "two".into(),
                        ..Default::default()
                    }),
                ]))
            })
            .next()
            .await;

        // Apply the first action (the one carrying the command).
        let action = actions.await.unwrap()[0].clone();
        let apply = project.update(cx, |project, cx| {
            project.apply_code_action(buffer.clone(), action, true, cx)
        });

        // Resolving the code action does not populate its edits. In absence of
        // edits, we must execute the given command.
        fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
            |action, _| async move { Ok(action) },
        );

        // While executing the command, the language server sends the editor
        // a `workspaceEdit` request.
        fake_server
            .handle_request::<lsp::request::ExecuteCommand, _, _>({
                let fake = fake_server.clone();
                move |params, _| {
                    assert_eq!(params.command, "_the/command");
                    let fake = fake.clone();
                    async move {
                        // Server-initiated edit: insert "X" at the start of a.ts.
                        fake.server
                            .request::<lsp::request::ApplyWorkspaceEdit>(
                                lsp::ApplyWorkspaceEditParams {
                                    label: None,
                                    edit: lsp::WorkspaceEdit {
                                        changes: Some(
                                            [(
                                                lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                                vec![lsp::TextEdit {
                                                    range: lsp::Range::new(
                                                        lsp::Position::new(0, 0),
                                                        lsp::Position::new(0, 0),
                                                    ),
                                                    new_text: "X".into(),
                                                }],
                                            )]
                                            .into_iter()
                                            .collect(),
                                        ),
                                        ..Default::default()
                                    },
                                },
                            )
                            .await
                            .unwrap();
                        Ok(Some(json!(null)))
                    }
                }
            })
            .next()
            .await;

        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspaceEdit` request.
        let transaction = apply.await.unwrap();
        assert!(transaction.0.contains_key(&buffer));
        buffer.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "Xa");
            // The transaction is undoable as a unit.
            buffer.undo(cx);
            assert_eq!(buffer.text(), "a");
        });
    }
6747
6748 #[gpui::test]
6749 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6750 let fs = FakeFs::new(cx.background());
6751 fs.insert_tree(
6752 "/dir",
6753 json!({
6754 "file1": "the old contents",
6755 }),
6756 )
6757 .await;
6758
6759 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6760 let buffer = project
6761 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6762 .await
6763 .unwrap();
6764 buffer
6765 .update(cx, |buffer, cx| {
6766 assert_eq!(buffer.text(), "the old contents");
6767 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6768 buffer.save(cx)
6769 })
6770 .await
6771 .unwrap();
6772
6773 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6774 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6775 }
6776
6777 #[gpui::test]
6778 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6779 let fs = FakeFs::new(cx.background());
6780 fs.insert_tree(
6781 "/dir",
6782 json!({
6783 "file1": "the old contents",
6784 }),
6785 )
6786 .await;
6787
6788 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6789 let buffer = project
6790 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6791 .await
6792 .unwrap();
6793 buffer
6794 .update(cx, |buffer, cx| {
6795 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6796 buffer.save(cx)
6797 })
6798 .await
6799 .unwrap();
6800
6801 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6802 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6803 }
6804
6805 #[gpui::test]
6806 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6807 let fs = FakeFs::new(cx.background());
6808 fs.insert_tree("/dir", json!({})).await;
6809
6810 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6811 let buffer = project.update(cx, |project, cx| {
6812 project.create_buffer("", None, cx).unwrap()
6813 });
6814 buffer.update(cx, |buffer, cx| {
6815 buffer.edit([(0..0, "abc")], cx);
6816 assert!(buffer.is_dirty());
6817 assert!(!buffer.has_conflict());
6818 });
6819 project
6820 .update(cx, |project, cx| {
6821 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6822 })
6823 .await
6824 .unwrap();
6825 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6826 buffer.read_with(cx, |buffer, cx| {
6827 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6828 assert!(!buffer.is_dirty());
6829 assert!(!buffer.has_conflict());
6830 });
6831
6832 let opened_buffer = project
6833 .update(cx, |project, cx| {
6834 project.open_local_buffer("/dir/file1", cx)
6835 })
6836 .await
6837 .unwrap();
6838 assert_eq!(opened_buffer, buffer);
6839 }
6840
6841 #[gpui::test(retries = 5)]
6842 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6843 let dir = temp_tree(json!({
6844 "a": {
6845 "file1": "",
6846 "file2": "",
6847 "file3": "",
6848 },
6849 "b": {
6850 "c": {
6851 "file4": "",
6852 "file5": "",
6853 }
6854 }
6855 }));
6856
6857 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6858 let rpc = project.read_with(cx, |p, _| p.client.clone());
6859
6860 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6861 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6862 async move { buffer.await.unwrap() }
6863 };
6864 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6865 project.read_with(cx, |project, cx| {
6866 let tree = project.worktrees(cx).next().unwrap();
6867 tree.read(cx)
6868 .entry_for_path(path)
6869 .expect(&format!("no entry for path {}", path))
6870 .id
6871 })
6872 };
6873
6874 let buffer2 = buffer_for_path("a/file2", cx).await;
6875 let buffer3 = buffer_for_path("a/file3", cx).await;
6876 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6877 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6878
6879 let file2_id = id_for_path("a/file2", &cx);
6880 let file3_id = id_for_path("a/file3", &cx);
6881 let file4_id = id_for_path("b/c/file4", &cx);
6882
6883 // Create a remote copy of this worktree.
6884 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6885 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6886 let (remote, load_task) = cx.update(|cx| {
6887 Worktree::remote(
6888 1,
6889 1,
6890 initial_snapshot.to_proto(&Default::default(), true),
6891 rpc.clone(),
6892 cx,
6893 )
6894 });
6895 // tree
6896 load_task.await;
6897
6898 cx.read(|cx| {
6899 assert!(!buffer2.read(cx).is_dirty());
6900 assert!(!buffer3.read(cx).is_dirty());
6901 assert!(!buffer4.read(cx).is_dirty());
6902 assert!(!buffer5.read(cx).is_dirty());
6903 });
6904
6905 // Rename and delete files and directories.
6906 tree.flush_fs_events(&cx).await;
6907 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6908 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6909 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6910 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6911 tree.flush_fs_events(&cx).await;
6912
6913 let expected_paths = vec![
6914 "a",
6915 "a/file1",
6916 "a/file2.new",
6917 "b",
6918 "d",
6919 "d/file3",
6920 "d/file4",
6921 ];
6922
6923 cx.read(|app| {
6924 assert_eq!(
6925 tree.read(app)
6926 .paths()
6927 .map(|p| p.to_str().unwrap())
6928 .collect::<Vec<_>>(),
6929 expected_paths
6930 );
6931
6932 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6933 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6934 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6935
6936 assert_eq!(
6937 buffer2.read(app).file().unwrap().path().as_ref(),
6938 Path::new("a/file2.new")
6939 );
6940 assert_eq!(
6941 buffer3.read(app).file().unwrap().path().as_ref(),
6942 Path::new("d/file3")
6943 );
6944 assert_eq!(
6945 buffer4.read(app).file().unwrap().path().as_ref(),
6946 Path::new("d/file4")
6947 );
6948 assert_eq!(
6949 buffer5.read(app).file().unwrap().path().as_ref(),
6950 Path::new("b/c/file5")
6951 );
6952
6953 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6954 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6955 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6956 assert!(buffer5.read(app).file().unwrap().is_deleted());
6957 });
6958
6959 // Update the remote worktree. Check that it becomes consistent with the
6960 // local worktree.
6961 remote.update(cx, |remote, cx| {
6962 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6963 &initial_snapshot,
6964 1,
6965 1,
6966 true,
6967 );
6968 remote
6969 .as_remote_mut()
6970 .unwrap()
6971 .snapshot
6972 .apply_remote_update(update_message)
6973 .unwrap();
6974
6975 assert_eq!(
6976 remote
6977 .paths()
6978 .map(|p| p.to_str().unwrap())
6979 .collect::<Vec<_>>(),
6980 expected_paths
6981 );
6982 });
6983 }
6984
6985 #[gpui::test]
6986 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6987 let fs = FakeFs::new(cx.background());
6988 fs.insert_tree(
6989 "/dir",
6990 json!({
6991 "a.txt": "a-contents",
6992 "b.txt": "b-contents",
6993 }),
6994 )
6995 .await;
6996
6997 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6998
6999 // Spawn multiple tasks to open paths, repeating some paths.
7000 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7001 (
7002 p.open_local_buffer("/dir/a.txt", cx),
7003 p.open_local_buffer("/dir/b.txt", cx),
7004 p.open_local_buffer("/dir/a.txt", cx),
7005 )
7006 });
7007
7008 let buffer_a_1 = buffer_a_1.await.unwrap();
7009 let buffer_a_2 = buffer_a_2.await.unwrap();
7010 let buffer_b = buffer_b.await.unwrap();
7011 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7012 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7013
7014 // There is only one buffer per path.
7015 let buffer_a_id = buffer_a_1.id();
7016 assert_eq!(buffer_a_2.id(), buffer_a_id);
7017
7018 // Open the same path again while it is still open.
7019 drop(buffer_a_1);
7020 let buffer_a_3 = project
7021 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7022 .await
7023 .unwrap();
7024
7025 // There's still only one buffer per path.
7026 assert_eq!(buffer_a_3.id(), buffer_a_id);
7027 }
7028
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        // Tracks the dirty flag and the events (Edited/Dirtied/Saved/
        // FileHandleChanged) a buffer emits across edits, saves, and
        // on-disk deletions.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            // Record every event except Operation, which fires on each edit.
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a save by acknowledging the current version.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        // Dirty the buffer first, then delete the underlying file.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
7162
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // On-disk changes to an unmodified buffer are auto-reloaded (with
        // anchors translated through the diff); changes under a modified
        // buffer instead mark it as conflicted.
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        // Place an anchor at column 1 of each of the first three rows.
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // The anchors moved with the surviving lines they were placed on.
            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
7238
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        // LSP diagnostics whose `relatedInformation` links them together
        // should be collapsed into groups: one primary diagnostic per group
        // plus its supporting hints, all sharing a `group_id`.
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        // Five raw diagnostics: a warning ("error 1") with one hint, and an
        // error ("error 2") with two hints. Hints are duplicated both as
        // related information on the primaries and as standalone HINT
        // diagnostics pointing back at the "original diagnostic".
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All entries, ordered by position: group 0 is the warning + its
        // hint, group 1 is the error + its two hints.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Querying by group id returns only that group's entries.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
7489
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Two-phase LSP rename: `prepare_rename` yields the symbol's range,
        // then `perform_rename` applies a workspace edit spanning multiple
        // files and returns a transaction covering every touched buffer.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        // Advertise rename support with prepareProvider so prepare_rename
        // goes through the server.
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Phase 1: prepare_rename at offset 7 (inside "ONE") reports the
        // renameable range 6..9.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Phase 2: perform the rename to "THREE". The server's workspace
        // edit touches one.rs (the definition) and two.rs (both usages).
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The transaction covers both buffers, each with its edits applied.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
7628
7629 #[gpui::test]
7630 async fn test_search(cx: &mut gpui::TestAppContext) {
7631 let fs = FakeFs::new(cx.background());
7632 fs.insert_tree(
7633 "/dir",
7634 json!({
7635 "one.rs": "const ONE: usize = 1;",
7636 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7637 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7638 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7639 }),
7640 )
7641 .await;
7642 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7643 assert_eq!(
7644 search(&project, SearchQuery::text("TWO", false, true), cx)
7645 .await
7646 .unwrap(),
7647 HashMap::from_iter([
7648 ("two.rs".to_string(), vec![6..9]),
7649 ("three.rs".to_string(), vec![37..40])
7650 ])
7651 );
7652
7653 let buffer_4 = project
7654 .update(cx, |project, cx| {
7655 project.open_local_buffer("/dir/four.rs", cx)
7656 })
7657 .await
7658 .unwrap();
7659 buffer_4.update(cx, |buffer, cx| {
7660 let text = "two::TWO";
7661 buffer.edit([(20..28, text), (31..43, text)], cx);
7662 });
7663
7664 assert_eq!(
7665 search(&project, SearchQuery::text("TWO", false, true), cx)
7666 .await
7667 .unwrap(),
7668 HashMap::from_iter([
7669 ("two.rs".to_string(), vec![6..9]),
7670 ("three.rs".to_string(), vec![37..40]),
7671 ("four.rs".to_string(), vec![25..28, 36..39])
7672 ])
7673 );
7674
7675 async fn search(
7676 project: &ModelHandle<Project>,
7677 query: SearchQuery,
7678 cx: &mut gpui::TestAppContext,
7679 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7680 let results = project
7681 .update(cx, |project, cx| project.search(query, cx))
7682 .await?;
7683
7684 Ok(results
7685 .into_iter()
7686 .map(|(buffer, ranges)| {
7687 buffer.read_with(cx, |buffer, _| {
7688 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7689 let ranges = ranges
7690 .into_iter()
7691 .map(|range| range.to_offset(buffer))
7692 .collect::<Vec<_>>();
7693 (path, ranges)
7694 })
7695 })
7696 .collect())
7697 }
7698 }
7699}