1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use util::{post_inc, ResultExt, TryFutureExt as _};
53
54pub use fs::*;
55pub use worktree::*;
56
57pub trait Item: Entity {
58 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
59}
60
61pub struct Project {
62 worktrees: Vec<WorktreeHandle>,
63 active_entry: Option<ProjectEntryId>,
64 languages: Arc<LanguageRegistry>,
65 language_servers:
66 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
67 started_language_servers:
68 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
69 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
70 language_server_settings: Arc<Mutex<serde_json::Value>>,
71 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
72 next_language_server_id: usize,
73 client: Arc<client::Client>,
74 next_entry_id: Arc<AtomicUsize>,
75 user_store: ModelHandle<UserStore>,
76 fs: Arc<dyn Fs>,
77 client_state: ProjectClientState,
78 collaborators: HashMap<PeerId, Collaborator>,
79 subscriptions: Vec<client::Subscription>,
80 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
81 shared_buffers: HashMap<PeerId, HashSet<u64>>,
82 loading_buffers: HashMap<
83 ProjectPath,
84 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
85 >,
86 loading_local_worktrees:
87 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
88 opened_buffers: HashMap<u64, OpenBuffer>,
89 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
90 nonce: u128,
91}
92
93enum OpenBuffer {
94 Strong(ModelHandle<Buffer>),
95 Weak(WeakModelHandle<Buffer>),
96 Loading(Vec<Operation>),
97}
98
99enum WorktreeHandle {
100 Strong(ModelHandle<Worktree>),
101 Weak(WeakModelHandle<Worktree>),
102}
103
104enum ProjectClientState {
105 Local {
106 is_shared: bool,
107 remote_id_tx: watch::Sender<Option<u64>>,
108 remote_id_rx: watch::Receiver<Option<u64>>,
109 _maintain_remote_id_task: Task<Option<()>>,
110 },
111 Remote {
112 sharing_has_stopped: bool,
113 remote_id: u64,
114 replica_id: ReplicaId,
115 _detect_unshare_task: Task<Option<()>>,
116 },
117}
118
119#[derive(Clone, Debug)]
120pub struct Collaborator {
121 pub user: Arc<User>,
122 pub peer_id: PeerId,
123 pub replica_id: ReplicaId,
124}
125
126#[derive(Clone, Debug, PartialEq)]
127pub enum Event {
128 ActiveEntryChanged(Option<ProjectEntryId>),
129 WorktreeRemoved(WorktreeId),
130 DiskBasedDiagnosticsStarted,
131 DiskBasedDiagnosticsUpdated,
132 DiskBasedDiagnosticsFinished,
133 DiagnosticsUpdated(ProjectPath),
134 RemoteIdChanged(Option<u64>),
135 CollaboratorLeft(PeerId),
136}
137
138#[derive(Serialize)]
139pub struct LanguageServerStatus {
140 pub name: String,
141 pub pending_work: BTreeMap<String, LanguageServerProgress>,
142 pub pending_diagnostic_updates: isize,
143}
144
145#[derive(Clone, Debug, Serialize)]
146pub struct LanguageServerProgress {
147 pub message: Option<String>,
148 pub percentage: Option<usize>,
149 #[serde(skip_serializing)]
150 pub last_update_at: Instant,
151}
152
153#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
154pub struct ProjectPath {
155 pub worktree_id: WorktreeId,
156 pub path: Arc<Path>,
157}
158
159#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
160pub struct DiagnosticSummary {
161 pub error_count: usize,
162 pub warning_count: usize,
163}
164
165#[derive(Debug)]
166pub struct Location {
167 pub buffer: ModelHandle<Buffer>,
168 pub range: Range<language::Anchor>,
169}
170
171#[derive(Debug)]
172pub struct DocumentHighlight {
173 pub range: Range<language::Anchor>,
174 pub kind: DocumentHighlightKind,
175}
176
177#[derive(Clone, Debug)]
178pub struct Symbol {
179 pub source_worktree_id: WorktreeId,
180 pub worktree_id: WorktreeId,
181 pub language_server_name: LanguageServerName,
182 pub path: PathBuf,
183 pub label: CodeLabel,
184 pub name: String,
185 pub kind: lsp::SymbolKind,
186 pub range: Range<PointUtf16>,
187 pub signature: [u8; 32],
188}
189
190#[derive(Default)]
191pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 };
199
200 for entry in diagnostics {
201 if entry.diagnostic.is_primary {
202 match entry.diagnostic.severity {
203 DiagnosticSeverity::ERROR => this.error_count += 1,
204 DiagnosticSeverity::WARNING => this.warning_count += 1,
205 _ => {}
206 }
207 }
208 }
209
210 this
211 }
212
213 pub fn is_empty(&self) -> bool {
214 self.error_count == 0 && self.warning_count == 0
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 }
223 }
224}
225
226#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
227pub struct ProjectEntryId(usize);
228
229impl ProjectEntryId {
230 pub const MAX: Self = Self(usize::MAX);
231
232 pub fn new(counter: &AtomicUsize) -> Self {
233 Self(counter.fetch_add(1, SeqCst))
234 }
235
236 pub fn from_proto(id: u64) -> Self {
237 Self(id as usize)
238 }
239
240 pub fn to_proto(&self) -> u64 {
241 self.0 as u64
242 }
243
244 pub fn to_usize(&self) -> usize {
245 self.0
246 }
247}
248
249impl Project {
250 pub fn init(client: &Arc<Client>) {
251 client.add_model_message_handler(Self::handle_add_collaborator);
252 client.add_model_message_handler(Self::handle_buffer_reloaded);
253 client.add_model_message_handler(Self::handle_buffer_saved);
254 client.add_model_message_handler(Self::handle_start_language_server);
255 client.add_model_message_handler(Self::handle_update_language_server);
256 client.add_model_message_handler(Self::handle_remove_collaborator);
257 client.add_model_message_handler(Self::handle_register_worktree);
258 client.add_model_message_handler(Self::handle_unregister_worktree);
259 client.add_model_message_handler(Self::handle_unshare_project);
260 client.add_model_message_handler(Self::handle_update_buffer_file);
261 client.add_model_message_handler(Self::handle_update_buffer);
262 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
263 client.add_model_message_handler(Self::handle_update_worktree);
264 client.add_model_request_handler(Self::handle_create_project_entry);
265 client.add_model_request_handler(Self::handle_rename_project_entry);
266 client.add_model_request_handler(Self::handle_delete_project_entry);
267 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
268 client.add_model_request_handler(Self::handle_apply_code_action);
269 client.add_model_request_handler(Self::handle_reload_buffers);
270 client.add_model_request_handler(Self::handle_format_buffers);
271 client.add_model_request_handler(Self::handle_get_code_actions);
272 client.add_model_request_handler(Self::handle_get_completions);
273 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
274 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
275 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
276 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
277 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
278 client.add_model_request_handler(Self::handle_search_project);
279 client.add_model_request_handler(Self::handle_get_project_symbols);
280 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
281 client.add_model_request_handler(Self::handle_open_buffer_by_id);
282 client.add_model_request_handler(Self::handle_open_buffer_by_path);
283 client.add_model_request_handler(Self::handle_save_buffer);
284 }
285
286 pub fn local(
287 client: Arc<Client>,
288 user_store: ModelHandle<UserStore>,
289 languages: Arc<LanguageRegistry>,
290 fs: Arc<dyn Fs>,
291 cx: &mut MutableAppContext,
292 ) -> ModelHandle<Self> {
293 cx.add_model(|cx: &mut ModelContext<Self>| {
294 let (remote_id_tx, remote_id_rx) = watch::channel();
295 let _maintain_remote_id_task = cx.spawn_weak({
296 let rpc = client.clone();
297 move |this, mut cx| {
298 async move {
299 let mut status = rpc.status();
300 while let Some(status) = status.next().await {
301 if let Some(this) = this.upgrade(&cx) {
302 if status.is_connected() {
303 this.update(&mut cx, |this, cx| this.register(cx)).await?;
304 } else {
305 this.update(&mut cx, |this, cx| this.unregister(cx));
306 }
307 }
308 }
309 Ok(())
310 }
311 .log_err()
312 }
313 });
314
315 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
316 Self {
317 worktrees: Default::default(),
318 collaborators: Default::default(),
319 opened_buffers: Default::default(),
320 shared_buffers: Default::default(),
321 loading_buffers: Default::default(),
322 loading_local_worktrees: Default::default(),
323 buffer_snapshots: Default::default(),
324 client_state: ProjectClientState::Local {
325 is_shared: false,
326 remote_id_tx,
327 remote_id_rx,
328 _maintain_remote_id_task,
329 },
330 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
331 subscriptions: Vec::new(),
332 active_entry: None,
333 languages,
334 client,
335 user_store,
336 fs,
337 next_entry_id: Default::default(),
338 language_servers: Default::default(),
339 started_language_servers: Default::default(),
340 language_server_statuses: Default::default(),
341 last_workspace_edits_by_language_server: Default::default(),
342 language_server_settings: Default::default(),
343 next_language_server_id: 0,
344 nonce: StdRng::from_entropy().gen(),
345 }
346 })
347 }
348
349 pub async fn remote(
350 remote_id: u64,
351 client: Arc<Client>,
352 user_store: ModelHandle<UserStore>,
353 languages: Arc<LanguageRegistry>,
354 fs: Arc<dyn Fs>,
355 cx: &mut AsyncAppContext,
356 ) -> Result<ModelHandle<Self>> {
357 client.authenticate_and_connect(true, &cx).await?;
358
359 let response = client
360 .request(proto::JoinProject {
361 project_id: remote_id,
362 })
363 .await?;
364
365 let replica_id = response.replica_id as ReplicaId;
366
367 let mut worktrees = Vec::new();
368 for worktree in response.worktrees {
369 let (worktree, load_task) = cx
370 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
371 worktrees.push(worktree);
372 load_task.detach();
373 }
374
375 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
376 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
377 let mut this = Self {
378 worktrees: Vec::new(),
379 loading_buffers: Default::default(),
380 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
381 shared_buffers: Default::default(),
382 loading_local_worktrees: Default::default(),
383 active_entry: None,
384 collaborators: Default::default(),
385 languages,
386 user_store: user_store.clone(),
387 fs,
388 next_entry_id: Default::default(),
389 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
390 client: client.clone(),
391 client_state: ProjectClientState::Remote {
392 sharing_has_stopped: false,
393 remote_id,
394 replica_id,
395 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
396 async move {
397 let mut status = client.status();
398 let is_connected =
399 status.next().await.map_or(false, |s| s.is_connected());
400 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
401 if !is_connected || status.next().await.is_some() {
402 if let Some(this) = this.upgrade(&cx) {
403 this.update(&mut cx, |this, cx| this.project_unshared(cx))
404 }
405 }
406 Ok(())
407 }
408 .log_err()
409 }),
410 },
411 language_servers: Default::default(),
412 started_language_servers: Default::default(),
413 language_server_settings: Default::default(),
414 language_server_statuses: response
415 .language_servers
416 .into_iter()
417 .map(|server| {
418 (
419 server.id as usize,
420 LanguageServerStatus {
421 name: server.name,
422 pending_work: Default::default(),
423 pending_diagnostic_updates: 0,
424 },
425 )
426 })
427 .collect(),
428 last_workspace_edits_by_language_server: Default::default(),
429 next_language_server_id: 0,
430 opened_buffers: Default::default(),
431 buffer_snapshots: Default::default(),
432 nonce: StdRng::from_entropy().gen(),
433 };
434 for worktree in worktrees {
435 this.add_worktree(&worktree, cx);
436 }
437 this
438 });
439
440 let user_ids = response
441 .collaborators
442 .iter()
443 .map(|peer| peer.user_id)
444 .collect();
445 user_store
446 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
447 .await?;
448 let mut collaborators = HashMap::default();
449 for message in response.collaborators {
450 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
451 collaborators.insert(collaborator.peer_id, collaborator);
452 }
453
454 this.update(cx, |this, _| {
455 this.collaborators = collaborators;
456 });
457
458 Ok(this)
459 }
460
461 #[cfg(any(test, feature = "test-support"))]
462 pub async fn test(
463 fs: Arc<dyn Fs>,
464 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
465 cx: &mut gpui::TestAppContext,
466 ) -> ModelHandle<Project> {
467 let languages = Arc::new(LanguageRegistry::test());
468 let http_client = client::test::FakeHttpClient::with_404_response();
469 let client = client::Client::new(http_client.clone());
470 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
471 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
472 for path in root_paths {
473 let (tree, _) = project
474 .update(cx, |project, cx| {
475 project.find_or_create_local_worktree(path, true, cx)
476 })
477 .await
478 .unwrap();
479 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
480 .await;
481 }
482 project
483 }
484
485 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
486 self.opened_buffers
487 .get(&remote_id)
488 .and_then(|buffer| buffer.upgrade(cx))
489 }
490
491 pub fn languages(&self) -> &Arc<LanguageRegistry> {
492 &self.languages
493 }
494
495 #[cfg(any(test, feature = "test-support"))]
496 pub fn check_invariants(&self, cx: &AppContext) {
497 if self.is_local() {
498 let mut worktree_root_paths = HashMap::default();
499 for worktree in self.worktrees(cx) {
500 let worktree = worktree.read(cx);
501 let abs_path = worktree.as_local().unwrap().abs_path().clone();
502 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
503 assert_eq!(
504 prev_worktree_id,
505 None,
506 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
507 abs_path,
508 worktree.id(),
509 prev_worktree_id
510 )
511 }
512 } else {
513 let replica_id = self.replica_id();
514 for buffer in self.opened_buffers.values() {
515 if let Some(buffer) = buffer.upgrade(cx) {
516 let buffer = buffer.read(cx);
517 assert_eq!(
518 buffer.deferred_ops_len(),
519 0,
520 "replica {}, buffer {} has deferred operations",
521 replica_id,
522 buffer.remote_id()
523 );
524 }
525 }
526 }
527 }
528
529 #[cfg(any(test, feature = "test-support"))]
530 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
531 let path = path.into();
532 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
533 self.opened_buffers.iter().any(|(_, buffer)| {
534 if let Some(buffer) = buffer.upgrade(cx) {
535 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
536 if file.worktree == worktree && file.path() == &path.path {
537 return true;
538 }
539 }
540 }
541 false
542 })
543 } else {
544 false
545 }
546 }
547
548 pub fn fs(&self) -> &Arc<dyn Fs> {
549 &self.fs
550 }
551
552 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
553 self.unshare(cx);
554 for worktree in &self.worktrees {
555 if let Some(worktree) = worktree.upgrade(cx) {
556 worktree.update(cx, |worktree, _| {
557 worktree.as_local_mut().unwrap().unregister();
558 });
559 }
560 }
561
562 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
563 *remote_id_tx.borrow_mut() = None;
564 }
565
566 self.subscriptions.clear();
567 }
568
569 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
570 self.unregister(cx);
571
572 let response = self.client.request(proto::RegisterProject {});
573 cx.spawn(|this, mut cx| async move {
574 let remote_id = response.await?.project_id;
575
576 let mut registrations = Vec::new();
577 this.update(&mut cx, |this, cx| {
578 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
579 *remote_id_tx.borrow_mut() = Some(remote_id);
580 }
581
582 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
583
584 this.subscriptions
585 .push(this.client.add_model_for_remote_entity(remote_id, cx));
586
587 for worktree in &this.worktrees {
588 if let Some(worktree) = worktree.upgrade(cx) {
589 registrations.push(worktree.update(cx, |worktree, cx| {
590 let worktree = worktree.as_local_mut().unwrap();
591 worktree.register(remote_id, cx)
592 }));
593 }
594 }
595 });
596
597 futures::future::try_join_all(registrations).await?;
598 Ok(())
599 })
600 }
601
602 pub fn remote_id(&self) -> Option<u64> {
603 match &self.client_state {
604 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
605 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
606 }
607 }
608
609 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
610 let mut id = None;
611 let mut watch = None;
612 match &self.client_state {
613 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
614 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
615 }
616
617 async move {
618 if let Some(id) = id {
619 return id;
620 }
621 let mut watch = watch.unwrap();
622 loop {
623 let id = *watch.borrow();
624 if let Some(id) = id {
625 return id;
626 }
627 watch.next().await;
628 }
629 }
630 }
631
632 pub fn replica_id(&self) -> ReplicaId {
633 match &self.client_state {
634 ProjectClientState::Local { .. } => 0,
635 ProjectClientState::Remote { replica_id, .. } => *replica_id,
636 }
637 }
638
639 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
640 &self.collaborators
641 }
642
643 pub fn worktrees<'a>(
644 &'a self,
645 cx: &'a AppContext,
646 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
647 self.worktrees
648 .iter()
649 .filter_map(move |worktree| worktree.upgrade(cx))
650 }
651
652 pub fn visible_worktrees<'a>(
653 &'a self,
654 cx: &'a AppContext,
655 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
656 self.worktrees.iter().filter_map(|worktree| {
657 worktree.upgrade(cx).and_then(|worktree| {
658 if worktree.read(cx).is_visible() {
659 Some(worktree)
660 } else {
661 None
662 }
663 })
664 })
665 }
666
667 pub fn worktree_for_id(
668 &self,
669 id: WorktreeId,
670 cx: &AppContext,
671 ) -> Option<ModelHandle<Worktree>> {
672 self.worktrees(cx)
673 .find(|worktree| worktree.read(cx).id() == id)
674 }
675
676 pub fn worktree_for_entry(
677 &self,
678 entry_id: ProjectEntryId,
679 cx: &AppContext,
680 ) -> Option<ModelHandle<Worktree>> {
681 self.worktrees(cx)
682 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
683 }
684
685 pub fn worktree_id_for_entry(
686 &self,
687 entry_id: ProjectEntryId,
688 cx: &AppContext,
689 ) -> Option<WorktreeId> {
690 self.worktree_for_entry(entry_id, cx)
691 .map(|worktree| worktree.read(cx).id())
692 }
693
694 pub fn create_entry(
695 &mut self,
696 project_path: impl Into<ProjectPath>,
697 is_directory: bool,
698 cx: &mut ModelContext<Self>,
699 ) -> Option<Task<Result<Entry>>> {
700 let project_path = project_path.into();
701 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
702 if self.is_local() {
703 Some(worktree.update(cx, |worktree, cx| {
704 worktree
705 .as_local_mut()
706 .unwrap()
707 .create_entry(project_path.path, is_directory, cx)
708 }))
709 } else {
710 let client = self.client.clone();
711 let project_id = self.remote_id().unwrap();
712 Some(cx.spawn_weak(|_, mut cx| async move {
713 let response = client
714 .request(proto::CreateProjectEntry {
715 worktree_id: project_path.worktree_id.to_proto(),
716 project_id,
717 path: project_path.path.as_os_str().as_bytes().to_vec(),
718 is_directory,
719 })
720 .await?;
721 let entry = response
722 .entry
723 .ok_or_else(|| anyhow!("missing entry in response"))?;
724 worktree
725 .update(&mut cx, |worktree, cx| {
726 worktree.as_remote().unwrap().insert_entry(
727 entry,
728 response.worktree_scan_id as usize,
729 cx,
730 )
731 })
732 .await
733 }))
734 }
735 }
736
737 pub fn rename_entry(
738 &mut self,
739 entry_id: ProjectEntryId,
740 new_path: impl Into<Arc<Path>>,
741 cx: &mut ModelContext<Self>,
742 ) -> Option<Task<Result<Entry>>> {
743 let worktree = self.worktree_for_entry(entry_id, cx)?;
744 let new_path = new_path.into();
745 if self.is_local() {
746 worktree.update(cx, |worktree, cx| {
747 worktree
748 .as_local_mut()
749 .unwrap()
750 .rename_entry(entry_id, new_path, cx)
751 })
752 } else {
753 let client = self.client.clone();
754 let project_id = self.remote_id().unwrap();
755
756 Some(cx.spawn_weak(|_, mut cx| async move {
757 let response = client
758 .request(proto::RenameProjectEntry {
759 project_id,
760 entry_id: entry_id.to_proto(),
761 new_path: new_path.as_os_str().as_bytes().to_vec(),
762 })
763 .await?;
764 let entry = response
765 .entry
766 .ok_or_else(|| anyhow!("missing entry in response"))?;
767 worktree
768 .update(&mut cx, |worktree, cx| {
769 worktree.as_remote().unwrap().insert_entry(
770 entry,
771 response.worktree_scan_id as usize,
772 cx,
773 )
774 })
775 .await
776 }))
777 }
778 }
779
780 pub fn delete_entry(
781 &mut self,
782 entry_id: ProjectEntryId,
783 cx: &mut ModelContext<Self>,
784 ) -> Option<Task<Result<()>>> {
785 let worktree = self.worktree_for_entry(entry_id, cx)?;
786 if self.is_local() {
787 worktree.update(cx, |worktree, cx| {
788 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
789 })
790 } else {
791 let client = self.client.clone();
792 let project_id = self.remote_id().unwrap();
793 Some(cx.spawn_weak(|_, mut cx| async move {
794 let response = client
795 .request(proto::DeleteProjectEntry {
796 project_id,
797 entry_id: entry_id.to_proto(),
798 })
799 .await?;
800 worktree
801 .update(&mut cx, move |worktree, cx| {
802 worktree.as_remote().unwrap().delete_entry(
803 entry_id,
804 response.worktree_scan_id as usize,
805 cx,
806 )
807 })
808 .await
809 }))
810 }
811 }
812
813 pub fn can_share(&self, cx: &AppContext) -> bool {
814 self.is_local() && self.visible_worktrees(cx).next().is_some()
815 }
816
817 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
818 let rpc = self.client.clone();
819 cx.spawn(|this, mut cx| async move {
820 let project_id = this.update(&mut cx, |this, cx| {
821 if let ProjectClientState::Local {
822 is_shared,
823 remote_id_rx,
824 ..
825 } = &mut this.client_state
826 {
827 *is_shared = true;
828
829 for open_buffer in this.opened_buffers.values_mut() {
830 match open_buffer {
831 OpenBuffer::Strong(_) => {}
832 OpenBuffer::Weak(buffer) => {
833 if let Some(buffer) = buffer.upgrade(cx) {
834 *open_buffer = OpenBuffer::Strong(buffer);
835 }
836 }
837 OpenBuffer::Loading(_) => unreachable!(),
838 }
839 }
840
841 for worktree_handle in this.worktrees.iter_mut() {
842 match worktree_handle {
843 WorktreeHandle::Strong(_) => {}
844 WorktreeHandle::Weak(worktree) => {
845 if let Some(worktree) = worktree.upgrade(cx) {
846 *worktree_handle = WorktreeHandle::Strong(worktree);
847 }
848 }
849 }
850 }
851
852 remote_id_rx
853 .borrow()
854 .ok_or_else(|| anyhow!("no project id"))
855 } else {
856 Err(anyhow!("can't share a remote project"))
857 }
858 })?;
859
860 rpc.request(proto::ShareProject { project_id }).await?;
861
862 let mut tasks = Vec::new();
863 this.update(&mut cx, |this, cx| {
864 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
865 worktree.update(cx, |worktree, cx| {
866 let worktree = worktree.as_local_mut().unwrap();
867 tasks.push(worktree.share(project_id, cx));
868 });
869 }
870 });
871 for task in tasks {
872 task.await?;
873 }
874 this.update(&mut cx, |_, cx| cx.notify());
875 Ok(())
876 })
877 }
878
879 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
880 let rpc = self.client.clone();
881
882 if let ProjectClientState::Local {
883 is_shared,
884 remote_id_rx,
885 ..
886 } = &mut self.client_state
887 {
888 if !*is_shared {
889 return;
890 }
891
892 *is_shared = false;
893 self.collaborators.clear();
894 self.shared_buffers.clear();
895 for worktree_handle in self.worktrees.iter_mut() {
896 if let WorktreeHandle::Strong(worktree) = worktree_handle {
897 let is_visible = worktree.update(cx, |worktree, _| {
898 worktree.as_local_mut().unwrap().unshare();
899 worktree.is_visible()
900 });
901 if !is_visible {
902 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
903 }
904 }
905 }
906
907 for open_buffer in self.opened_buffers.values_mut() {
908 match open_buffer {
909 OpenBuffer::Strong(buffer) => {
910 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
911 }
912 _ => {}
913 }
914 }
915
916 if let Some(project_id) = *remote_id_rx.borrow() {
917 rpc.send(proto::UnshareProject { project_id }).log_err();
918 }
919
920 cx.notify();
921 } else {
922 log::error!("attempted to unshare a remote project");
923 }
924 }
925
926 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
927 if let ProjectClientState::Remote {
928 sharing_has_stopped,
929 ..
930 } = &mut self.client_state
931 {
932 *sharing_has_stopped = true;
933 self.collaborators.clear();
934 cx.notify();
935 }
936 }
937
938 pub fn is_read_only(&self) -> bool {
939 match &self.client_state {
940 ProjectClientState::Local { .. } => false,
941 ProjectClientState::Remote {
942 sharing_has_stopped,
943 ..
944 } => *sharing_has_stopped,
945 }
946 }
947
948 pub fn is_local(&self) -> bool {
949 match &self.client_state {
950 ProjectClientState::Local { .. } => true,
951 ProjectClientState::Remote { .. } => false,
952 }
953 }
954
955 pub fn is_remote(&self) -> bool {
956 !self.is_local()
957 }
958
959 pub fn create_buffer(
960 &mut self,
961 text: &str,
962 language: Option<Arc<Language>>,
963 cx: &mut ModelContext<Self>,
964 ) -> Result<ModelHandle<Buffer>> {
965 if self.is_remote() {
966 return Err(anyhow!("creating buffers as a guest is not supported yet"));
967 }
968
969 let buffer = cx.add_model(|cx| {
970 Buffer::new(self.replica_id(), text, cx)
971 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
972 });
973 self.register_buffer(&buffer, cx)?;
974 Ok(buffer)
975 }
976
977 pub fn open_path(
978 &mut self,
979 path: impl Into<ProjectPath>,
980 cx: &mut ModelContext<Self>,
981 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
982 let task = self.open_buffer(path, cx);
983 cx.spawn_weak(|_, cx| async move {
984 let buffer = task.await?;
985 let project_entry_id = buffer
986 .read_with(&cx, |buffer, cx| {
987 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
988 })
989 .ok_or_else(|| anyhow!("no project entry"))?;
990 Ok((project_entry_id, buffer.into()))
991 })
992 }
993
994 pub fn open_local_buffer(
995 &mut self,
996 abs_path: impl AsRef<Path>,
997 cx: &mut ModelContext<Self>,
998 ) -> Task<Result<ModelHandle<Buffer>>> {
999 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1000 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1001 } else {
1002 Task::ready(Err(anyhow!("no such path")))
1003 }
1004 }
1005
1006 pub fn open_buffer(
1007 &mut self,
1008 path: impl Into<ProjectPath>,
1009 cx: &mut ModelContext<Self>,
1010 ) -> Task<Result<ModelHandle<Buffer>>> {
1011 let project_path = path.into();
1012 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1013 worktree
1014 } else {
1015 return Task::ready(Err(anyhow!("no such worktree")));
1016 };
1017
1018 // If there is already a buffer for the given path, then return it.
1019 let existing_buffer = self.get_open_buffer(&project_path, cx);
1020 if let Some(existing_buffer) = existing_buffer {
1021 return Task::ready(Ok(existing_buffer));
1022 }
1023
1024 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1025 // If the given path is already being loaded, then wait for that existing
1026 // task to complete and return the same buffer.
1027 hash_map::Entry::Occupied(e) => e.get().clone(),
1028
1029 // Otherwise, record the fact that this path is now being loaded.
1030 hash_map::Entry::Vacant(entry) => {
1031 let (mut tx, rx) = postage::watch::channel();
1032 entry.insert(rx.clone());
1033
1034 let load_buffer = if worktree.read(cx).is_local() {
1035 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1036 } else {
1037 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1038 };
1039
1040 cx.spawn(move |this, mut cx| async move {
1041 let load_result = load_buffer.await;
1042 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1043 // Record the fact that the buffer is no longer loading.
1044 this.loading_buffers.remove(&project_path);
1045 let buffer = load_result.map_err(Arc::new)?;
1046 Ok(buffer)
1047 }));
1048 })
1049 .detach();
1050 rx
1051 }
1052 };
1053
1054 cx.foreground().spawn(async move {
1055 loop {
1056 if let Some(result) = loading_watch.borrow().as_ref() {
1057 match result {
1058 Ok(buffer) => return Ok(buffer.clone()),
1059 Err(error) => return Err(anyhow!("{}", error)),
1060 }
1061 }
1062 loading_watch.next().await;
1063 }
1064 })
1065 }
1066
1067 fn open_local_buffer_internal(
1068 &mut self,
1069 path: &Arc<Path>,
1070 worktree: &ModelHandle<Worktree>,
1071 cx: &mut ModelContext<Self>,
1072 ) -> Task<Result<ModelHandle<Buffer>>> {
1073 let load_buffer = worktree.update(cx, |worktree, cx| {
1074 let worktree = worktree.as_local_mut().unwrap();
1075 worktree.load_buffer(path, cx)
1076 });
1077 cx.spawn(|this, mut cx| async move {
1078 let buffer = load_buffer.await?;
1079 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1080 Ok(buffer)
1081 })
1082 }
1083
1084 fn open_remote_buffer_internal(
1085 &mut self,
1086 path: &Arc<Path>,
1087 worktree: &ModelHandle<Worktree>,
1088 cx: &mut ModelContext<Self>,
1089 ) -> Task<Result<ModelHandle<Buffer>>> {
1090 let rpc = self.client.clone();
1091 let project_id = self.remote_id().unwrap();
1092 let remote_worktree_id = worktree.read(cx).id();
1093 let path = path.clone();
1094 let path_string = path.to_string_lossy().to_string();
1095 cx.spawn(|this, mut cx| async move {
1096 let response = rpc
1097 .request(proto::OpenBufferByPath {
1098 project_id,
1099 worktree_id: remote_worktree_id.to_proto(),
1100 path: path_string,
1101 })
1102 .await?;
1103 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1104 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1105 .await
1106 })
1107 }
1108
1109 fn open_local_buffer_via_lsp(
1110 &mut self,
1111 abs_path: lsp::Url,
1112 lsp_adapter: Arc<dyn LspAdapter>,
1113 lsp_server: Arc<LanguageServer>,
1114 cx: &mut ModelContext<Self>,
1115 ) -> Task<Result<ModelHandle<Buffer>>> {
1116 cx.spawn(|this, mut cx| async move {
1117 let abs_path = abs_path
1118 .to_file_path()
1119 .map_err(|_| anyhow!("can't convert URI to path"))?;
1120 let (worktree, relative_path) = if let Some(result) =
1121 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1122 {
1123 result
1124 } else {
1125 let worktree = this
1126 .update(&mut cx, |this, cx| {
1127 this.create_local_worktree(&abs_path, false, cx)
1128 })
1129 .await?;
1130 this.update(&mut cx, |this, cx| {
1131 this.language_servers.insert(
1132 (worktree.read(cx).id(), lsp_adapter.name()),
1133 (lsp_adapter, lsp_server),
1134 );
1135 });
1136 (worktree, PathBuf::new())
1137 };
1138
1139 let project_path = ProjectPath {
1140 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1141 path: relative_path.into(),
1142 };
1143 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1144 .await
1145 })
1146 }
1147
1148 pub fn open_buffer_by_id(
1149 &mut self,
1150 id: u64,
1151 cx: &mut ModelContext<Self>,
1152 ) -> Task<Result<ModelHandle<Buffer>>> {
1153 if let Some(buffer) = self.buffer_for_id(id, cx) {
1154 Task::ready(Ok(buffer))
1155 } else if self.is_local() {
1156 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1157 } else if let Some(project_id) = self.remote_id() {
1158 let request = self
1159 .client
1160 .request(proto::OpenBufferById { project_id, id });
1161 cx.spawn(|this, mut cx| async move {
1162 let buffer = request
1163 .await?
1164 .buffer
1165 .ok_or_else(|| anyhow!("invalid buffer"))?;
1166 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1167 .await
1168 })
1169 } else {
1170 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1171 }
1172 }
1173
1174 pub fn save_buffer_as(
1175 &mut self,
1176 buffer: ModelHandle<Buffer>,
1177 abs_path: PathBuf,
1178 cx: &mut ModelContext<Project>,
1179 ) -> Task<Result<()>> {
1180 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1181 let old_path =
1182 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1183 cx.spawn(|this, mut cx| async move {
1184 if let Some(old_path) = old_path {
1185 this.update(&mut cx, |this, cx| {
1186 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1187 });
1188 }
1189 let (worktree, path) = worktree_task.await?;
1190 worktree
1191 .update(&mut cx, |worktree, cx| {
1192 worktree
1193 .as_local_mut()
1194 .unwrap()
1195 .save_buffer_as(buffer.clone(), path, cx)
1196 })
1197 .await?;
1198 this.update(&mut cx, |this, cx| {
1199 this.assign_language_to_buffer(&buffer, cx);
1200 this.register_buffer_with_language_server(&buffer, cx);
1201 });
1202 Ok(())
1203 })
1204 }
1205
1206 pub fn get_open_buffer(
1207 &mut self,
1208 path: &ProjectPath,
1209 cx: &mut ModelContext<Self>,
1210 ) -> Option<ModelHandle<Buffer>> {
1211 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1212 self.opened_buffers.values().find_map(|buffer| {
1213 let buffer = buffer.upgrade(cx)?;
1214 let file = File::from_dyn(buffer.read(cx).file())?;
1215 if file.worktree == worktree && file.path() == &path.path {
1216 Some(buffer)
1217 } else {
1218 None
1219 }
1220 })
1221 }
1222
1223 fn register_buffer(
1224 &mut self,
1225 buffer: &ModelHandle<Buffer>,
1226 cx: &mut ModelContext<Self>,
1227 ) -> Result<()> {
1228 let remote_id = buffer.read(cx).remote_id();
1229 let open_buffer = if self.is_remote() || self.is_shared() {
1230 OpenBuffer::Strong(buffer.clone())
1231 } else {
1232 OpenBuffer::Weak(buffer.downgrade())
1233 };
1234
1235 match self.opened_buffers.insert(remote_id, open_buffer) {
1236 None => {}
1237 Some(OpenBuffer::Loading(operations)) => {
1238 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1239 }
1240 Some(OpenBuffer::Weak(existing_handle)) => {
1241 if existing_handle.upgrade(cx).is_some() {
1242 Err(anyhow!(
1243 "already registered buffer with remote id {}",
1244 remote_id
1245 ))?
1246 }
1247 }
1248 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1249 "already registered buffer with remote id {}",
1250 remote_id
1251 ))?,
1252 }
1253 cx.subscribe(buffer, |this, buffer, event, cx| {
1254 this.on_buffer_event(buffer, event, cx);
1255 })
1256 .detach();
1257
1258 self.assign_language_to_buffer(buffer, cx);
1259 self.register_buffer_with_language_server(buffer, cx);
1260 cx.observe_release(buffer, |this, buffer, cx| {
1261 if let Some(file) = File::from_dyn(buffer.file()) {
1262 if file.is_local() {
1263 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1264 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1265 server
1266 .notify::<lsp::notification::DidCloseTextDocument>(
1267 lsp::DidCloseTextDocumentParams {
1268 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1269 },
1270 )
1271 .log_err();
1272 }
1273 }
1274 }
1275 })
1276 .detach();
1277
1278 Ok(())
1279 }
1280
1281 fn register_buffer_with_language_server(
1282 &mut self,
1283 buffer_handle: &ModelHandle<Buffer>,
1284 cx: &mut ModelContext<Self>,
1285 ) {
1286 let buffer = buffer_handle.read(cx);
1287 let buffer_id = buffer.remote_id();
1288 if let Some(file) = File::from_dyn(buffer.file()) {
1289 if file.is_local() {
1290 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1291 let initial_snapshot = buffer.text_snapshot();
1292
1293 let mut language_server = None;
1294 let mut language_id = None;
1295 if let Some(language) = buffer.language() {
1296 let worktree_id = file.worktree_id(cx);
1297 if let Some(adapter) = language.lsp_adapter() {
1298 language_id = adapter.id_for_language(language.name().as_ref());
1299 language_server = self
1300 .language_servers
1301 .get(&(worktree_id, adapter.name()))
1302 .cloned();
1303 }
1304 }
1305
1306 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1307 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1308 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1309 .log_err();
1310 }
1311 }
1312
1313 if let Some((_, server)) = language_server {
1314 server
1315 .notify::<lsp::notification::DidOpenTextDocument>(
1316 lsp::DidOpenTextDocumentParams {
1317 text_document: lsp::TextDocumentItem::new(
1318 uri,
1319 language_id.unwrap_or_default(),
1320 0,
1321 initial_snapshot.text(),
1322 ),
1323 }
1324 .clone(),
1325 )
1326 .log_err();
1327 buffer_handle.update(cx, |buffer, cx| {
1328 buffer.set_completion_triggers(
1329 server
1330 .capabilities()
1331 .completion_provider
1332 .as_ref()
1333 .and_then(|provider| provider.trigger_characters.clone())
1334 .unwrap_or(Vec::new()),
1335 cx,
1336 )
1337 });
1338 self.buffer_snapshots
1339 .insert(buffer_id, vec![(0, initial_snapshot)]);
1340 }
1341 }
1342 }
1343 }
1344
1345 fn unregister_buffer_from_language_server(
1346 &mut self,
1347 buffer: &ModelHandle<Buffer>,
1348 old_path: PathBuf,
1349 cx: &mut ModelContext<Self>,
1350 ) {
1351 buffer.update(cx, |buffer, cx| {
1352 buffer.update_diagnostics(Default::default(), cx);
1353 self.buffer_snapshots.remove(&buffer.remote_id());
1354 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1355 language_server
1356 .notify::<lsp::notification::DidCloseTextDocument>(
1357 lsp::DidCloseTextDocumentParams {
1358 text_document: lsp::TextDocumentIdentifier::new(
1359 lsp::Url::from_file_path(old_path).unwrap(),
1360 ),
1361 },
1362 )
1363 .log_err();
1364 }
1365 });
1366 }
1367
1368 fn on_buffer_event(
1369 &mut self,
1370 buffer: ModelHandle<Buffer>,
1371 event: &BufferEvent,
1372 cx: &mut ModelContext<Self>,
1373 ) -> Option<()> {
1374 match event {
1375 BufferEvent::Operation(operation) => {
1376 let project_id = self.remote_id()?;
1377 let request = self.client.request(proto::UpdateBuffer {
1378 project_id,
1379 buffer_id: buffer.read(cx).remote_id(),
1380 operations: vec![language::proto::serialize_operation(&operation)],
1381 });
1382 cx.background().spawn(request).detach_and_log_err(cx);
1383 }
1384 BufferEvent::Edited { .. } => {
1385 let (_, language_server) = self
1386 .language_server_for_buffer(buffer.read(cx), cx)?
1387 .clone();
1388 let buffer = buffer.read(cx);
1389 let file = File::from_dyn(buffer.file())?;
1390 let abs_path = file.as_local()?.abs_path(cx);
1391 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1392 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1393 let (version, prev_snapshot) = buffer_snapshots.last()?;
1394 let next_snapshot = buffer.text_snapshot();
1395 let next_version = version + 1;
1396
1397 let content_changes = buffer
1398 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1399 .map(|edit| {
1400 let edit_start = edit.new.start.0;
1401 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1402 let new_text = next_snapshot
1403 .text_for_range(edit.new.start.1..edit.new.end.1)
1404 .collect();
1405 lsp::TextDocumentContentChangeEvent {
1406 range: Some(lsp::Range::new(
1407 point_to_lsp(edit_start),
1408 point_to_lsp(edit_end),
1409 )),
1410 range_length: None,
1411 text: new_text,
1412 }
1413 })
1414 .collect();
1415
1416 buffer_snapshots.push((next_version, next_snapshot));
1417
1418 language_server
1419 .notify::<lsp::notification::DidChangeTextDocument>(
1420 lsp::DidChangeTextDocumentParams {
1421 text_document: lsp::VersionedTextDocumentIdentifier::new(
1422 uri,
1423 next_version,
1424 ),
1425 content_changes,
1426 },
1427 )
1428 .log_err();
1429 }
1430 BufferEvent::Saved => {
1431 let file = File::from_dyn(buffer.read(cx).file())?;
1432 let worktree_id = file.worktree_id(cx);
1433 let abs_path = file.as_local()?.abs_path(cx);
1434 let text_document = lsp::TextDocumentIdentifier {
1435 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1436 };
1437
1438 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1439 server
1440 .notify::<lsp::notification::DidSaveTextDocument>(
1441 lsp::DidSaveTextDocumentParams {
1442 text_document: text_document.clone(),
1443 text: None,
1444 },
1445 )
1446 .log_err();
1447 }
1448 }
1449 _ => {}
1450 }
1451
1452 None
1453 }
1454
1455 fn language_servers_for_worktree(
1456 &self,
1457 worktree_id: WorktreeId,
1458 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1459 self.language_servers.iter().filter_map(
1460 move |((language_server_worktree_id, _), server)| {
1461 if *language_server_worktree_id == worktree_id {
1462 Some(server)
1463 } else {
1464 None
1465 }
1466 },
1467 )
1468 }
1469
1470 fn assign_language_to_buffer(
1471 &mut self,
1472 buffer: &ModelHandle<Buffer>,
1473 cx: &mut ModelContext<Self>,
1474 ) -> Option<()> {
1475 // If the buffer has a language, set it and start the language server if we haven't already.
1476 let full_path = buffer.read(cx).file()?.full_path(cx);
1477 let language = self.languages.select_language(&full_path)?;
1478 buffer.update(cx, |buffer, cx| {
1479 buffer.set_language(Some(language.clone()), cx);
1480 });
1481
1482 let file = File::from_dyn(buffer.read(cx).file())?;
1483 let worktree = file.worktree.read(cx).as_local()?;
1484 let worktree_id = worktree.id();
1485 let worktree_abs_path = worktree.abs_path().clone();
1486 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1487
1488 None
1489 }
1490
1491 fn start_language_server(
1492 &mut self,
1493 worktree_id: WorktreeId,
1494 worktree_path: Arc<Path>,
1495 language: Arc<Language>,
1496 cx: &mut ModelContext<Self>,
1497 ) {
1498 let adapter = if let Some(adapter) = language.lsp_adapter() {
1499 adapter
1500 } else {
1501 return;
1502 };
1503 let key = (worktree_id, adapter.name());
1504 self.started_language_servers
1505 .entry(key.clone())
1506 .or_insert_with(|| {
1507 let server_id = post_inc(&mut self.next_language_server_id);
1508 let language_server = self.languages.start_language_server(
1509 server_id,
1510 language.clone(),
1511 worktree_path,
1512 self.client.http_client(),
1513 cx,
1514 );
1515 cx.spawn_weak(|this, mut cx| async move {
1516 let language_server = language_server?.await.log_err()?;
1517 let language_server = language_server
1518 .initialize(adapter.initialization_options())
1519 .await
1520 .log_err()?;
1521 let this = this.upgrade(&cx)?;
1522 let disk_based_diagnostics_progress_token =
1523 adapter.disk_based_diagnostics_progress_token();
1524
1525 language_server
1526 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1527 let this = this.downgrade();
1528 let adapter = adapter.clone();
1529 move |params, mut cx| {
1530 if let Some(this) = this.upgrade(&cx) {
1531 this.update(&mut cx, |this, cx| {
1532 this.on_lsp_diagnostics_published(
1533 server_id,
1534 params,
1535 &adapter,
1536 disk_based_diagnostics_progress_token,
1537 cx,
1538 );
1539 });
1540 }
1541 }
1542 })
1543 .detach();
1544
1545 language_server
1546 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1547 let settings = this
1548 .read_with(&cx, |this, _| this.language_server_settings.clone());
1549 move |params, _| {
1550 let settings = settings.lock().clone();
1551 async move {
1552 Ok(params
1553 .items
1554 .into_iter()
1555 .map(|item| {
1556 if let Some(section) = &item.section {
1557 settings
1558 .get(section)
1559 .cloned()
1560 .unwrap_or(serde_json::Value::Null)
1561 } else {
1562 settings.clone()
1563 }
1564 })
1565 .collect())
1566 }
1567 }
1568 })
1569 .detach();
1570
1571 language_server
1572 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1573 let this = this.downgrade();
1574 let adapter = adapter.clone();
1575 let language_server = language_server.clone();
1576 move |params, cx| {
1577 Self::on_lsp_workspace_edit(
1578 this,
1579 params,
1580 server_id,
1581 adapter.clone(),
1582 language_server.clone(),
1583 cx,
1584 )
1585 }
1586 })
1587 .detach();
1588
1589 language_server
1590 .on_notification::<lsp::notification::Progress, _>({
1591 let this = this.downgrade();
1592 move |params, mut cx| {
1593 if let Some(this) = this.upgrade(&cx) {
1594 this.update(&mut cx, |this, cx| {
1595 this.on_lsp_progress(
1596 params,
1597 server_id,
1598 disk_based_diagnostics_progress_token,
1599 cx,
1600 );
1601 });
1602 }
1603 }
1604 })
1605 .detach();
1606
1607 this.update(&mut cx, |this, cx| {
1608 this.language_servers
1609 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1610 this.language_server_statuses.insert(
1611 server_id,
1612 LanguageServerStatus {
1613 name: language_server.name().to_string(),
1614 pending_work: Default::default(),
1615 pending_diagnostic_updates: 0,
1616 },
1617 );
1618 language_server
1619 .notify::<lsp::notification::DidChangeConfiguration>(
1620 lsp::DidChangeConfigurationParams {
1621 settings: this.language_server_settings.lock().clone(),
1622 },
1623 )
1624 .ok();
1625
1626 if let Some(project_id) = this.remote_id() {
1627 this.client
1628 .send(proto::StartLanguageServer {
1629 project_id,
1630 server: Some(proto::LanguageServer {
1631 id: server_id as u64,
1632 name: language_server.name().to_string(),
1633 }),
1634 })
1635 .log_err();
1636 }
1637
1638 // Tell the language server about every open buffer in the worktree that matches the language.
1639 for buffer in this.opened_buffers.values() {
1640 if let Some(buffer_handle) = buffer.upgrade(cx) {
1641 let buffer = buffer_handle.read(cx);
1642 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1643 file
1644 } else {
1645 continue;
1646 };
1647 let language = if let Some(language) = buffer.language() {
1648 language
1649 } else {
1650 continue;
1651 };
1652 if file.worktree.read(cx).id() != key.0
1653 || language.lsp_adapter().map(|a| a.name())
1654 != Some(key.1.clone())
1655 {
1656 continue;
1657 }
1658
1659 let file = file.as_local()?;
1660 let versions = this
1661 .buffer_snapshots
1662 .entry(buffer.remote_id())
1663 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1664 let (version, initial_snapshot) = versions.last().unwrap();
1665 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1666 let language_id = adapter.id_for_language(language.name().as_ref());
1667 language_server
1668 .notify::<lsp::notification::DidOpenTextDocument>(
1669 lsp::DidOpenTextDocumentParams {
1670 text_document: lsp::TextDocumentItem::new(
1671 uri,
1672 language_id.unwrap_or_default(),
1673 *version,
1674 initial_snapshot.text(),
1675 ),
1676 },
1677 )
1678 .log_err()?;
1679 buffer_handle.update(cx, |buffer, cx| {
1680 buffer.set_completion_triggers(
1681 language_server
1682 .capabilities()
1683 .completion_provider
1684 .as_ref()
1685 .and_then(|provider| {
1686 provider.trigger_characters.clone()
1687 })
1688 .unwrap_or(Vec::new()),
1689 cx,
1690 )
1691 });
1692 }
1693 }
1694
1695 cx.notify();
1696 Some(())
1697 });
1698
1699 Some(language_server)
1700 })
1701 });
1702 }
1703
1704 pub fn restart_language_servers_for_buffers(
1705 &mut self,
1706 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1707 cx: &mut ModelContext<Self>,
1708 ) -> Option<()> {
1709 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1710 .into_iter()
1711 .filter_map(|buffer| {
1712 let file = File::from_dyn(buffer.read(cx).file())?;
1713 let worktree = file.worktree.read(cx).as_local()?;
1714 let worktree_id = worktree.id();
1715 let worktree_abs_path = worktree.abs_path().clone();
1716 let full_path = file.full_path(cx);
1717 Some((worktree_id, worktree_abs_path, full_path))
1718 })
1719 .collect();
1720 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1721 let language = self.languages.select_language(&full_path)?;
1722 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1723 }
1724
1725 None
1726 }
1727
1728 fn restart_language_server(
1729 &mut self,
1730 worktree_id: WorktreeId,
1731 worktree_path: Arc<Path>,
1732 language: Arc<Language>,
1733 cx: &mut ModelContext<Self>,
1734 ) {
1735 let adapter = if let Some(adapter) = language.lsp_adapter() {
1736 adapter
1737 } else {
1738 return;
1739 };
1740 let key = (worktree_id, adapter.name());
1741 let server_to_shutdown = self.language_servers.remove(&key);
1742 self.started_language_servers.remove(&key);
1743 server_to_shutdown
1744 .as_ref()
1745 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1746 cx.spawn_weak(|this, mut cx| async move {
1747 if let Some(this) = this.upgrade(&cx) {
1748 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1749 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1750 shutdown_task.await;
1751 }
1752 }
1753
1754 this.update(&mut cx, |this, cx| {
1755 this.start_language_server(worktree_id, worktree_path, language, cx);
1756 });
1757 }
1758 })
1759 .detach();
1760 }
1761
1762 fn on_lsp_diagnostics_published(
1763 &mut self,
1764 server_id: usize,
1765 mut params: lsp::PublishDiagnosticsParams,
1766 adapter: &Arc<dyn LspAdapter>,
1767 disk_based_diagnostics_progress_token: Option<&str>,
1768 cx: &mut ModelContext<Self>,
1769 ) {
1770 adapter.process_diagnostics(&mut params);
1771 if disk_based_diagnostics_progress_token.is_none() {
1772 self.disk_based_diagnostics_started(cx);
1773 self.broadcast_language_server_update(
1774 server_id,
1775 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1776 proto::LspDiskBasedDiagnosticsUpdating {},
1777 ),
1778 );
1779 }
1780 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1781 .log_err();
1782 if disk_based_diagnostics_progress_token.is_none() {
1783 self.disk_based_diagnostics_finished(cx);
1784 self.broadcast_language_server_update(
1785 server_id,
1786 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1787 proto::LspDiskBasedDiagnosticsUpdated {},
1788 ),
1789 );
1790 }
1791 }
1792
1793 fn on_lsp_progress(
1794 &mut self,
1795 progress: lsp::ProgressParams,
1796 server_id: usize,
1797 disk_based_diagnostics_progress_token: Option<&str>,
1798 cx: &mut ModelContext<Self>,
1799 ) {
1800 let token = match progress.token {
1801 lsp::NumberOrString::String(token) => token,
1802 lsp::NumberOrString::Number(token) => {
1803 log::info!("skipping numeric progress token {}", token);
1804 return;
1805 }
1806 };
1807 let progress = match progress.value {
1808 lsp::ProgressParamsValue::WorkDone(value) => value,
1809 };
1810 let language_server_status =
1811 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1812 status
1813 } else {
1814 return;
1815 };
1816 match progress {
1817 lsp::WorkDoneProgress::Begin(_) => {
1818 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1819 language_server_status.pending_diagnostic_updates += 1;
1820 if language_server_status.pending_diagnostic_updates == 1 {
1821 self.disk_based_diagnostics_started(cx);
1822 self.broadcast_language_server_update(
1823 server_id,
1824 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1825 proto::LspDiskBasedDiagnosticsUpdating {},
1826 ),
1827 );
1828 }
1829 } else {
1830 self.on_lsp_work_start(server_id, token.clone(), cx);
1831 self.broadcast_language_server_update(
1832 server_id,
1833 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1834 token,
1835 }),
1836 );
1837 }
1838 }
1839 lsp::WorkDoneProgress::Report(report) => {
1840 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1841 self.on_lsp_work_progress(
1842 server_id,
1843 token.clone(),
1844 LanguageServerProgress {
1845 message: report.message.clone(),
1846 percentage: report.percentage.map(|p| p as usize),
1847 last_update_at: Instant::now(),
1848 },
1849 cx,
1850 );
1851 self.broadcast_language_server_update(
1852 server_id,
1853 proto::update_language_server::Variant::WorkProgress(
1854 proto::LspWorkProgress {
1855 token,
1856 message: report.message,
1857 percentage: report.percentage.map(|p| p as u32),
1858 },
1859 ),
1860 );
1861 }
1862 }
1863 lsp::WorkDoneProgress::End(_) => {
1864 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1865 language_server_status.pending_diagnostic_updates -= 1;
1866 if language_server_status.pending_diagnostic_updates == 0 {
1867 self.disk_based_diagnostics_finished(cx);
1868 self.broadcast_language_server_update(
1869 server_id,
1870 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1871 proto::LspDiskBasedDiagnosticsUpdated {},
1872 ),
1873 );
1874 }
1875 } else {
1876 self.on_lsp_work_end(server_id, token.clone(), cx);
1877 self.broadcast_language_server_update(
1878 server_id,
1879 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1880 token,
1881 }),
1882 );
1883 }
1884 }
1885 }
1886 }
1887
1888 fn on_lsp_work_start(
1889 &mut self,
1890 language_server_id: usize,
1891 token: String,
1892 cx: &mut ModelContext<Self>,
1893 ) {
1894 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1895 status.pending_work.insert(
1896 token,
1897 LanguageServerProgress {
1898 message: None,
1899 percentage: None,
1900 last_update_at: Instant::now(),
1901 },
1902 );
1903 cx.notify();
1904 }
1905 }
1906
1907 fn on_lsp_work_progress(
1908 &mut self,
1909 language_server_id: usize,
1910 token: String,
1911 progress: LanguageServerProgress,
1912 cx: &mut ModelContext<Self>,
1913 ) {
1914 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1915 status.pending_work.insert(token, progress);
1916 cx.notify();
1917 }
1918 }
1919
1920 fn on_lsp_work_end(
1921 &mut self,
1922 language_server_id: usize,
1923 token: String,
1924 cx: &mut ModelContext<Self>,
1925 ) {
1926 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1927 status.pending_work.remove(&token);
1928 cx.notify();
1929 }
1930 }
1931
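    // Handles a `workspace/applyEdit` request coming from a language server:
    // applies the edit to the project and stashes the resulting transaction in
    // `last_workspace_edits_by_language_server` so the `ExecuteCommand` path in
    // `apply_code_action` can pick it up.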
1932 async fn on_lsp_workspace_edit(
1933 this: WeakModelHandle<Self>,
1934 params: lsp::ApplyWorkspaceEditParams,
1935 server_id: usize,
1936 adapter: Arc<dyn LspAdapter>,
1937 language_server: Arc<LanguageServer>,
1938 mut cx: AsyncAppContext,
1939 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1940 let this = this
1941 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1943 let transaction = Self::deserialize_workspace_edit(
1944 this.clone(),
1945 params.edit,
1946 true,
1947 adapter.clone(),
1948 language_server.clone(),
1949 &mut cx,
1950 )
1951 .await
1952 .log_err();
1953 this.update(&mut cx, |this, _| {
1954 if let Some(transaction) = transaction {
1955 this.last_workspace_edits_by_language_server
1956 .insert(server_id, transaction);
1957 }
1958 });
1959 Ok(lsp::ApplyWorkspaceEditResponse {
1960 applied: true,
1961 failed_change: None,
1962 failure_reason: None,
1963 })
1964 }
1965
1966 fn broadcast_language_server_update(
1967 &self,
1968 language_server_id: usize,
1969 event: proto::update_language_server::Variant,
1970 ) {
1971 if let Some(project_id) = self.remote_id() {
1972 self.client
1973 .send(proto::UpdateLanguageServer {
1974 project_id,
1975 language_server_id: language_server_id as u64,
1976 variant: Some(event),
1977 })
1978 .log_err();
1979 }
1980 }
1981
1982 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1983 for (_, server) in self.language_servers.values() {
1984 server
1985 .notify::<lsp::notification::DidChangeConfiguration>(
1986 lsp::DidChangeConfigurationParams {
1987 settings: settings.clone(),
1988 },
1989 )
1990 .ok();
1991 }
1992 *self.language_server_settings.lock() = settings;
1993 }
1994
1995 pub fn language_server_statuses(
1996 &self,
1997 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1998 self.language_server_statuses.values()
1999 }
2000
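    // Converts a `textDocument/publishDiagnostics` notification into diagnostic
    // entries, assigning a group id to each primary diagnostic and attaching its
    // related-information entries as secondary diagnostics in the same group.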
2001 pub fn update_diagnostics(
2002 &mut self,
2003 params: lsp::PublishDiagnosticsParams,
2004 disk_based_sources: &[&str],
2005 cx: &mut ModelContext<Self>,
2006 ) -> Result<()> {
2007 let abs_path = params
2008 .uri
2009 .to_file_path()
2010 .map_err(|_| anyhow!("URI is not a file"))?;
2011 let mut next_group_id = 0;
2012 let mut diagnostics = Vec::default();
2013 let mut primary_diagnostic_group_ids = HashMap::default();
2014 let mut sources_by_group_id = HashMap::default();
2015 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2017 let source = diagnostic.source.as_ref();
2018 let code = diagnostic.code.as_ref().map(|code| match code {
2019 lsp::NumberOrString::Number(code) => code.to_string(),
2020 lsp::NumberOrString::String(code) => code.clone(),
2021 });
2022 let range = range_from_lsp(diagnostic.range);
2023 let is_supporting = diagnostic
2024 .related_information
2025 .as_ref()
2026 .map_or(false, |infos| {
2027 infos.iter().any(|info| {
2028 primary_diagnostic_group_ids.contains_key(&(
2029 source,
2030 code.clone(),
2031 range_from_lsp(info.location.range),
2032 ))
2033 })
2034 });
2035
2036 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2037 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2038 });
2039
2040 if is_supporting {
2041 supporting_diagnostics.insert(
2042 (source, code.clone(), range),
2043 (diagnostic.severity, is_unnecessary),
2044 );
2045 } else {
2046 let group_id = post_inc(&mut next_group_id);
2047 let is_disk_based = source.map_or(false, |source| {
2048 disk_based_sources.contains(&source.as_str())
2049 });
2050
2051 sources_by_group_id.insert(group_id, source);
2052 primary_diagnostic_group_ids
2053 .insert((source, code.clone(), range.clone()), group_id);
2054
2055 diagnostics.push(DiagnosticEntry {
2056 range,
2057 diagnostic: Diagnostic {
2058 code: code.clone(),
2059 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2060 message: diagnostic.message.clone(),
2061 group_id,
2062 is_primary: true,
2063 is_valid: true,
2064 is_disk_based,
2065 is_unnecessary,
2066 },
2067 });
2068 if let Some(infos) = &diagnostic.related_information {
2069 for info in infos {
2070 if info.location.uri == params.uri && !info.message.is_empty() {
2071 let range = range_from_lsp(info.location.range);
2072 diagnostics.push(DiagnosticEntry {
2073 range,
2074 diagnostic: Diagnostic {
2075 code: code.clone(),
2076 severity: DiagnosticSeverity::INFORMATION,
2077 message: info.message.clone(),
2078 group_id,
2079 is_primary: false,
2080 is_valid: true,
2081 is_disk_based,
2082 is_unnecessary: false,
2083 },
2084 });
2085 }
2086 }
2087 }
2088 }
2089 }
2090
2091 for entry in &mut diagnostics {
2092 let diagnostic = &mut entry.diagnostic;
2093 if !diagnostic.is_primary {
2094 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2095 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2096 source,
2097 diagnostic.code.clone(),
2098 entry.range.clone(),
2099 )) {
2100 if let Some(severity) = severity {
2101 diagnostic.severity = severity;
2102 }
2103 diagnostic.is_unnecessary = is_unnecessary;
2104 }
2105 }
2106 }
2107
2108 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2109 Ok(())
2110 }
2111
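    // Routes a set of diagnostics to the local worktree containing `abs_path`,
    // updating any open buffer for that path and emitting `DiagnosticsUpdated`
    // when the worktree's summaries change.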
2112 pub fn update_diagnostic_entries(
2113 &mut self,
2114 abs_path: PathBuf,
2115 version: Option<i32>,
2116 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2117 cx: &mut ModelContext<Project>,
2118 ) -> Result<(), anyhow::Error> {
2119 let (worktree, relative_path) = self
2120 .find_local_worktree(&abs_path, cx)
2121 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2122 if !worktree.read(cx).is_visible() {
2123 return Ok(());
2124 }
2125
2126 let project_path = ProjectPath {
2127 worktree_id: worktree.read(cx).id(),
2128 path: relative_path.into(),
2129 };
2130 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2131 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2132 }
2133
2134 let updated = worktree.update(cx, |worktree, cx| {
2135 worktree
2136 .as_local_mut()
2137 .ok_or_else(|| anyhow!("not a local worktree"))?
2138 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2139 })?;
2140 if updated {
2141 cx.emit(Event::DiagnosticsUpdated(project_path));
2142 }
2143 Ok(())
2144 }
2145
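    // Clips the incoming diagnostic ranges against the buffer snapshot that
    // corresponds to the given LSP document version, translating disk-based
    // diagnostics across unsaved edits, and installs them on the buffer.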
2146 fn update_buffer_diagnostics(
2147 &mut self,
2148 buffer: &ModelHandle<Buffer>,
2149 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2150 version: Option<i32>,
2151 cx: &mut ModelContext<Self>,
2152 ) -> Result<()> {
2153 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2154 Ordering::Equal
2155 .then_with(|| b.is_primary.cmp(&a.is_primary))
2156 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2157 .then_with(|| a.severity.cmp(&b.severity))
2158 .then_with(|| a.message.cmp(&b.message))
2159 }
2160
2161 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2162
2163 diagnostics.sort_unstable_by(|a, b| {
2164 Ordering::Equal
2165 .then_with(|| a.range.start.cmp(&b.range.start))
2166 .then_with(|| b.range.end.cmp(&a.range.end))
2167 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2168 });
2169
2170 let mut sanitized_diagnostics = Vec::new();
2171 let edits_since_save = Patch::new(
2172 snapshot
2173 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2174 .collect(),
2175 );
2176 for entry in diagnostics {
2177 let start;
2178 let end;
2179 if entry.diagnostic.is_disk_based {
2180 // Some diagnostics are based on files on disk instead of buffers'
2181 // current contents. Adjust these diagnostics' ranges to reflect
2182 // any unsaved edits.
2183 start = edits_since_save.old_to_new(entry.range.start);
2184 end = edits_since_save.old_to_new(entry.range.end);
2185 } else {
2186 start = entry.range.start;
2187 end = entry.range.end;
2188 }
2189
2190 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2191 ..snapshot.clip_point_utf16(end, Bias::Right);
2192
2193 // Expand empty ranges by one character
2194 if range.start == range.end {
2195 range.end.column += 1;
2196 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2197 if range.start == range.end && range.end.column > 0 {
2198 range.start.column -= 1;
2199 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2200 }
2201 }
2202
2203 sanitized_diagnostics.push(DiagnosticEntry {
2204 range,
2205 diagnostic: entry.diagnostic,
2206 });
2207 }
2208 drop(edits_since_save);
2209
2210 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2211 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2212 Ok(())
2213 }
2214
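    // Reloads the given buffers from disk, skipping any that aren't dirty. Local
    // buffers are reloaded directly; remote buffers are reloaded by the host via
    // a `ReloadBuffers` request.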
2215 pub fn reload_buffers(
2216 &self,
2217 buffers: HashSet<ModelHandle<Buffer>>,
2218 push_to_history: bool,
2219 cx: &mut ModelContext<Self>,
2220 ) -> Task<Result<ProjectTransaction>> {
2221 let mut local_buffers = Vec::new();
2222 let mut remote_buffers = None;
2223 for buffer_handle in buffers {
2224 let buffer = buffer_handle.read(cx);
2225 if buffer.is_dirty() {
2226 if let Some(file) = File::from_dyn(buffer.file()) {
2227 if file.is_local() {
2228 local_buffers.push(buffer_handle);
2229 } else {
2230 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2231 }
2232 }
2233 }
2234 }
2235
2236 let remote_buffers = self.remote_id().zip(remote_buffers);
2237 let client = self.client.clone();
2238
2239 cx.spawn(|this, mut cx| async move {
2240 let mut project_transaction = ProjectTransaction::default();
2241
2242 if let Some((project_id, remote_buffers)) = remote_buffers {
2243 let response = client
2244 .request(proto::ReloadBuffers {
2245 project_id,
2246 buffer_ids: remote_buffers
2247 .iter()
2248 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2249 .collect(),
2250 })
2251 .await?
2252 .transaction
2253 .ok_or_else(|| anyhow!("missing transaction"))?;
2254 project_transaction = this
2255 .update(&mut cx, |this, cx| {
2256 this.deserialize_project_transaction(response, push_to_history, cx)
2257 })
2258 .await?;
2259 }
2260
2261 for buffer in local_buffers {
2262 let transaction = buffer
2263 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2264 .await?;
2265 buffer.update(&mut cx, |buffer, cx| {
2266 if let Some(transaction) = transaction {
2267 if !push_to_history {
2268 buffer.forget_transaction(transaction.id);
2269 }
2270 project_transaction.0.insert(cx.handle(), transaction);
2271 }
2272 });
2273 }
2274
2275 Ok(project_transaction)
2276 })
2277 }
2278
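    // Formats the given buffers. Remote buffers are formatted by the host via a
    // `FormatBuffers` request; local buffers use the language server's document
    // formatting (or whole-buffer range formatting as a fallback), and all
    // resulting edits are collected into a single `ProjectTransaction`.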
2279 pub fn format(
2280 &self,
2281 buffers: HashSet<ModelHandle<Buffer>>,
2282 push_to_history: bool,
2283 cx: &mut ModelContext<Project>,
2284 ) -> Task<Result<ProjectTransaction>> {
2285 let mut local_buffers = Vec::new();
2286 let mut remote_buffers = None;
2287 for buffer_handle in buffers {
2288 let buffer = buffer_handle.read(cx);
2289 if let Some(file) = File::from_dyn(buffer.file()) {
2290 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2291 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2292 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2293 }
2294 } else {
2295 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2296 }
2297 } else {
2298 return Task::ready(Ok(Default::default()));
2299 }
2300 }
2301
2302 let remote_buffers = self.remote_id().zip(remote_buffers);
2303 let client = self.client.clone();
2304
2305 cx.spawn(|this, mut cx| async move {
2306 let mut project_transaction = ProjectTransaction::default();
2307
2308 if let Some((project_id, remote_buffers)) = remote_buffers {
2309 let response = client
2310 .request(proto::FormatBuffers {
2311 project_id,
2312 buffer_ids: remote_buffers
2313 .iter()
2314 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2315 .collect(),
2316 })
2317 .await?
2318 .transaction
2319 .ok_or_else(|| anyhow!("missing transaction"))?;
2320 project_transaction = this
2321 .update(&mut cx, |this, cx| {
2322 this.deserialize_project_transaction(response, push_to_history, cx)
2323 })
2324 .await?;
2325 }
2326
2327 for (buffer, buffer_abs_path, language_server) in local_buffers {
2328 let text_document = lsp::TextDocumentIdentifier::new(
2329 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2330 );
2331 let capabilities = &language_server.capabilities();
2332 let tab_size = cx.update(|cx| {
2333 let language_name = buffer.read(cx).language().map(|language| language.name());
2334 cx.global::<Settings>().tab_size(language_name.as_deref())
2335 });
2336 let lsp_edits = if capabilities
2337 .document_formatting_provider
2338 .as_ref()
2339 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2340 {
2341 language_server
2342 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2343 text_document,
2344 options: lsp::FormattingOptions {
2345 tab_size,
2346 insert_spaces: true,
2347 insert_final_newline: Some(true),
2348 ..Default::default()
2349 },
2350 work_done_progress_params: Default::default(),
2351 })
2352 .await?
2353 } else if capabilities
2354 .document_range_formatting_provider
2355 .as_ref()
2356 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2357 {
2358 let buffer_start = lsp::Position::new(0, 0);
2359 let buffer_end =
2360 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2361 language_server
2362 .request::<lsp::request::RangeFormatting>(
2363 lsp::DocumentRangeFormattingParams {
2364 text_document,
2365 range: lsp::Range::new(buffer_start, buffer_end),
2366 options: lsp::FormattingOptions {
                                    tab_size,
2368 insert_spaces: true,
2369 insert_final_newline: Some(true),
2370 ..Default::default()
2371 },
2372 work_done_progress_params: Default::default(),
2373 },
2374 )
2375 .await?
2376 } else {
2377 continue;
2378 };
2379
2380 if let Some(lsp_edits) = lsp_edits {
2381 let edits = this
2382 .update(&mut cx, |this, cx| {
2383 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2384 })
2385 .await?;
2386 buffer.update(&mut cx, |buffer, cx| {
2387 buffer.finalize_last_transaction();
2388 buffer.start_transaction();
2389 for (range, text) in edits {
2390 buffer.edit([(range, text)], cx);
2391 }
2392 if buffer.end_transaction(cx).is_some() {
2393 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2394 if !push_to_history {
2395 buffer.forget_transaction(transaction.id);
2396 }
2397 project_transaction.0.insert(cx.handle(), transaction);
2398 }
2399 });
2400 }
2401 }
2402
2403 Ok(project_transaction)
2404 })
2405 }
2406
2407 pub fn definition<T: ToPointUtf16>(
2408 &self,
2409 buffer: &ModelHandle<Buffer>,
2410 position: T,
2411 cx: &mut ModelContext<Self>,
2412 ) -> Task<Result<Vec<Location>>> {
2413 let position = position.to_point_utf16(buffer.read(cx));
2414 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2415 }
2416
2417 pub fn references<T: ToPointUtf16>(
2418 &self,
2419 buffer: &ModelHandle<Buffer>,
2420 position: T,
2421 cx: &mut ModelContext<Self>,
2422 ) -> Task<Result<Vec<Location>>> {
2423 let position = position.to_point_utf16(buffer.read(cx));
2424 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2425 }
2426
2427 pub fn document_highlights<T: ToPointUtf16>(
2428 &self,
2429 buffer: &ModelHandle<Buffer>,
2430 position: T,
2431 cx: &mut ModelContext<Self>,
2432 ) -> Task<Result<Vec<DocumentHighlight>>> {
2433 let position = position.to_point_utf16(buffer.read(cx));
2434
2435 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2436 }
2437
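    // Performs a workspace-wide symbol search. Locally, every running language
    // server is queried and the results are resolved to worktree-relative paths;
    // on remote projects the request is forwarded to the host.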
2438 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2439 if self.is_local() {
2440 let mut requests = Vec::new();
2441 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2442 let worktree_id = *worktree_id;
2443 if let Some(worktree) = self
2444 .worktree_for_id(worktree_id, cx)
2445 .and_then(|worktree| worktree.read(cx).as_local())
2446 {
2447 let lsp_adapter = lsp_adapter.clone();
2448 let worktree_abs_path = worktree.abs_path().clone();
2449 requests.push(
2450 language_server
2451 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2452 query: query.to_string(),
2453 ..Default::default()
2454 })
2455 .log_err()
2456 .map(move |response| {
2457 (
2458 lsp_adapter,
2459 worktree_id,
2460 worktree_abs_path,
2461 response.unwrap_or_default(),
2462 )
2463 }),
2464 );
2465 }
2466 }
2467
2468 cx.spawn_weak(|this, cx| async move {
2469 let responses = futures::future::join_all(requests).await;
2470 let this = if let Some(this) = this.upgrade(&cx) {
2471 this
2472 } else {
2473 return Ok(Default::default());
2474 };
2475 this.read_with(&cx, |this, cx| {
2476 let mut symbols = Vec::new();
2477 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2478 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2479 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2480 let mut worktree_id = source_worktree_id;
2481 let path;
2482 if let Some((worktree, rel_path)) =
2483 this.find_local_worktree(&abs_path, cx)
2484 {
2485 worktree_id = worktree.read(cx).id();
2486 path = rel_path;
2487 } else {
2488 path = relativize_path(&worktree_abs_path, &abs_path);
2489 }
2490
2491 let label = this
2492 .languages
2493 .select_language(&path)
2494 .and_then(|language| {
2495 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2496 })
2497 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2498 let signature = this.symbol_signature(worktree_id, &path);
2499
2500 Some(Symbol {
2501 source_worktree_id,
2502 worktree_id,
2503 language_server_name: adapter.name(),
2504 name: lsp_symbol.name,
2505 kind: lsp_symbol.kind,
2506 label,
2507 path,
2508 range: range_from_lsp(lsp_symbol.location.range),
2509 signature,
2510 })
2511 }));
2512 }
2513 Ok(symbols)
2514 })
2515 })
2516 } else if let Some(project_id) = self.remote_id() {
2517 let request = self.client.request(proto::GetProjectSymbols {
2518 project_id,
2519 query: query.to_string(),
2520 });
2521 cx.spawn_weak(|this, cx| async move {
2522 let response = request.await?;
2523 let mut symbols = Vec::new();
2524 if let Some(this) = this.upgrade(&cx) {
2525 this.read_with(&cx, |this, _| {
2526 symbols.extend(
2527 response
2528 .symbols
2529 .into_iter()
2530 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2531 );
2532 })
2533 }
2534 Ok(symbols)
2535 })
2536 } else {
2537 Task::ready(Ok(Default::default()))
2538 }
2539 }
2540
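    // Opens the buffer containing a symbol returned by `symbols`, resolving the
    // symbol's worktree-relative path to an absolute path and opening it via the
    // language server that produced the symbol.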
2541 pub fn open_buffer_for_symbol(
2542 &mut self,
2543 symbol: &Symbol,
2544 cx: &mut ModelContext<Self>,
2545 ) -> Task<Result<ModelHandle<Buffer>>> {
2546 if self.is_local() {
2547 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2548 symbol.source_worktree_id,
2549 symbol.language_server_name.clone(),
2550 )) {
2551 server.clone()
2552 } else {
2553 return Task::ready(Err(anyhow!(
2554 "language server for worktree and language not found"
2555 )));
2556 };
2557
2558 let worktree_abs_path = if let Some(worktree_abs_path) = self
2559 .worktree_for_id(symbol.worktree_id, cx)
2560 .and_then(|worktree| worktree.read(cx).as_local())
2561 .map(|local_worktree| local_worktree.abs_path())
2562 {
2563 worktree_abs_path
2564 } else {
2565 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2566 };
2567 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2568 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2569 uri
2570 } else {
2571 return Task::ready(Err(anyhow!("invalid symbol path")));
2572 };
2573
2574 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2575 } else if let Some(project_id) = self.remote_id() {
2576 let request = self.client.request(proto::OpenBufferForSymbol {
2577 project_id,
2578 symbol: Some(serialize_symbol(symbol)),
2579 });
2580 cx.spawn(|this, mut cx| async move {
2581 let response = request.await?;
2582 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2583 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2584 .await
2585 })
2586 } else {
2587 Task::ready(Err(anyhow!("project does not have a remote id")))
2588 }
2589 }
2590
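    // Requests completions at the given position, converting the language
    // server's text edits into anchored ranges within the buffer (or inferring a
    // range from the word under the cursor when the server omits one).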
2591 pub fn completions<T: ToPointUtf16>(
2592 &self,
2593 source_buffer_handle: &ModelHandle<Buffer>,
2594 position: T,
2595 cx: &mut ModelContext<Self>,
2596 ) -> Task<Result<Vec<Completion>>> {
2597 let source_buffer_handle = source_buffer_handle.clone();
2598 let source_buffer = source_buffer_handle.read(cx);
2599 let buffer_id = source_buffer.remote_id();
2600 let language = source_buffer.language().cloned();
2601 let worktree;
2602 let buffer_abs_path;
2603 if let Some(file) = File::from_dyn(source_buffer.file()) {
2604 worktree = file.worktree.clone();
2605 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2606 } else {
2607 return Task::ready(Ok(Default::default()));
2608 };
2609
2610 let position = position.to_point_utf16(source_buffer);
2611 let anchor = source_buffer.anchor_after(position);
2612
2613 if worktree.read(cx).as_local().is_some() {
2614 let buffer_abs_path = buffer_abs_path.unwrap();
2615 let (_, lang_server) =
2616 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2617 server.clone()
2618 } else {
2619 return Task::ready(Ok(Default::default()));
2620 };
2621
2622 cx.spawn(|_, cx| async move {
2623 let completions = lang_server
2624 .request::<lsp::request::Completion>(lsp::CompletionParams {
2625 text_document_position: lsp::TextDocumentPositionParams::new(
2626 lsp::TextDocumentIdentifier::new(
2627 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2628 ),
2629 point_to_lsp(position),
2630 ),
2631 context: Default::default(),
2632 work_done_progress_params: Default::default(),
2633 partial_result_params: Default::default(),
2634 })
2635 .await
2636 .context("lsp completion request failed")?;
2637
2638 let completions = if let Some(completions) = completions {
2639 match completions {
2640 lsp::CompletionResponse::Array(completions) => completions,
2641 lsp::CompletionResponse::List(list) => list.items,
2642 }
2643 } else {
2644 Default::default()
2645 };
2646
2647 source_buffer_handle.read_with(&cx, |this, _| {
2648 let snapshot = this.snapshot();
2649 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2650 let mut range_for_token = None;
2651 Ok(completions
2652 .into_iter()
2653 .filter_map(|lsp_completion| {
2654 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2655 // If the language server provides a range to overwrite, then
2656 // check that the range is valid.
2657 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2658 let range = range_from_lsp(edit.range);
2659 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2660 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2661 if start != range.start || end != range.end {
2662 log::info!("completion out of expected range");
2663 return None;
2664 }
2665 (
2666 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2667 edit.new_text.clone(),
2668 )
2669 }
2670 // If the language server does not provide a range, then infer
2671 // the range based on the syntax tree.
2672 None => {
2673 if position != clipped_position {
2674 log::info!("completion out of expected range");
2675 return None;
2676 }
2677 let Range { start, end } = range_for_token
2678 .get_or_insert_with(|| {
2679 let offset = position.to_offset(&snapshot);
2680 snapshot
2681 .range_for_word_token_at(offset)
2682 .unwrap_or_else(|| offset..offset)
2683 })
2684 .clone();
2685 let text = lsp_completion
2686 .insert_text
2687 .as_ref()
2688 .unwrap_or(&lsp_completion.label)
2689 .clone();
2690 (
2691 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2692 text.clone(),
2693 )
2694 }
2695 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2696 log::info!("unsupported insert/replace completion");
2697 return None;
2698 }
2699 };
2700
2701 Some(Completion {
2702 old_range,
2703 new_text,
2704 label: language
2705 .as_ref()
2706 .and_then(|l| l.label_for_completion(&lsp_completion))
2707 .unwrap_or_else(|| {
2708 CodeLabel::plain(
2709 lsp_completion.label.clone(),
2710 lsp_completion.filter_text.as_deref(),
2711 )
2712 }),
2713 lsp_completion,
2714 })
2715 })
2716 .collect())
2717 })
2718 })
2719 } else if let Some(project_id) = self.remote_id() {
2720 let rpc = self.client.clone();
2721 let message = proto::GetCompletions {
2722 project_id,
2723 buffer_id,
2724 position: Some(language::proto::serialize_anchor(&anchor)),
2725 version: serialize_version(&source_buffer.version()),
2726 };
2727 cx.spawn_weak(|_, mut cx| async move {
2728 let response = rpc.request(message).await?;
2729
2730 source_buffer_handle
2731 .update(&mut cx, |buffer, _| {
2732 buffer.wait_for_version(deserialize_version(response.version))
2733 })
2734 .await;
2735
2736 response
2737 .completions
2738 .into_iter()
2739 .map(|completion| {
2740 language::proto::deserialize_completion(completion, language.as_ref())
2741 })
2742 .collect()
2743 })
2744 } else {
2745 Task::ready(Ok(Default::default()))
2746 }
2747 }
2748
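    // Resolves a completion item and applies any `additionalTextEdits` it
    // carries, returning the resulting transaction.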
2749 pub fn apply_additional_edits_for_completion(
2750 &self,
2751 buffer_handle: ModelHandle<Buffer>,
2752 completion: Completion,
2753 push_to_history: bool,
2754 cx: &mut ModelContext<Self>,
2755 ) -> Task<Result<Option<Transaction>>> {
2756 let buffer = buffer_handle.read(cx);
2757 let buffer_id = buffer.remote_id();
2758
2759 if self.is_local() {
2760 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2761 {
2762 server.clone()
2763 } else {
2764 return Task::ready(Ok(Default::default()));
2765 };
2766
2767 cx.spawn(|this, mut cx| async move {
2768 let resolved_completion = lang_server
2769 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2770 .await?;
2771 if let Some(edits) = resolved_completion.additional_text_edits {
2772 let edits = this
2773 .update(&mut cx, |this, cx| {
2774 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2775 })
2776 .await?;
2777 buffer_handle.update(&mut cx, |buffer, cx| {
2778 buffer.finalize_last_transaction();
2779 buffer.start_transaction();
2780 for (range, text) in edits {
2781 buffer.edit([(range, text)], cx);
2782 }
2783 let transaction = if buffer.end_transaction(cx).is_some() {
2784 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2785 if !push_to_history {
2786 buffer.forget_transaction(transaction.id);
2787 }
2788 Some(transaction)
2789 } else {
2790 None
2791 };
2792 Ok(transaction)
2793 })
2794 } else {
2795 Ok(None)
2796 }
2797 })
2798 } else if let Some(project_id) = self.remote_id() {
2799 let client = self.client.clone();
2800 cx.spawn(|_, mut cx| async move {
2801 let response = client
2802 .request(proto::ApplyCompletionAdditionalEdits {
2803 project_id,
2804 buffer_id,
2805 completion: Some(language::proto::serialize_completion(&completion)),
2806 })
2807 .await?;
2808
2809 if let Some(transaction) = response.transaction {
2810 let transaction = language::proto::deserialize_transaction(transaction)?;
2811 buffer_handle
2812 .update(&mut cx, |buffer, _| {
2813 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2814 })
2815 .await;
2816 if push_to_history {
2817 buffer_handle.update(&mut cx, |buffer, _| {
2818 buffer.push_transaction(transaction.clone(), Instant::now());
2819 });
2820 }
2821 Ok(Some(transaction))
2822 } else {
2823 Ok(None)
2824 }
2825 })
2826 } else {
2827 Task::ready(Err(anyhow!("project does not have a remote id")))
2828 }
2829 }
2830
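    // Requests code actions for the given range, passing along the diagnostics
    // that overlap it as context and restricting the results to quickfix,
    // refactor, and source actions.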
2831 pub fn code_actions<T: Clone + ToOffset>(
2832 &self,
2833 buffer_handle: &ModelHandle<Buffer>,
2834 range: Range<T>,
2835 cx: &mut ModelContext<Self>,
2836 ) -> Task<Result<Vec<CodeAction>>> {
2837 let buffer_handle = buffer_handle.clone();
2838 let buffer = buffer_handle.read(cx);
2839 let snapshot = buffer.snapshot();
2840 let relevant_diagnostics = snapshot
2841 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2842 .map(|entry| entry.to_lsp_diagnostic_stub())
2843 .collect();
2844 let buffer_id = buffer.remote_id();
2845 let worktree;
2846 let buffer_abs_path;
2847 if let Some(file) = File::from_dyn(buffer.file()) {
2848 worktree = file.worktree.clone();
2849 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2850 } else {
2851 return Task::ready(Ok(Default::default()));
2852 };
2853 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2854
2855 if worktree.read(cx).as_local().is_some() {
2856 let buffer_abs_path = buffer_abs_path.unwrap();
2857 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2858 {
2859 server.clone()
2860 } else {
2861 return Task::ready(Ok(Default::default()));
2862 };
2863
2864 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2865 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2867 return Ok(Default::default());
2868 }
2869
2870 Ok(lang_server
2871 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2872 text_document: lsp::TextDocumentIdentifier::new(
2873 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2874 ),
2875 range: lsp_range,
2876 work_done_progress_params: Default::default(),
2877 partial_result_params: Default::default(),
2878 context: lsp::CodeActionContext {
2879 diagnostics: relevant_diagnostics,
2880 only: Some(vec![
2881 lsp::CodeActionKind::QUICKFIX,
2882 lsp::CodeActionKind::REFACTOR,
2883 lsp::CodeActionKind::REFACTOR_EXTRACT,
2884 lsp::CodeActionKind::SOURCE,
2885 ]),
2886 },
2887 })
2888 .await?
2889 .unwrap_or_default()
2890 .into_iter()
2891 .filter_map(|entry| {
2892 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2893 Some(CodeAction {
2894 range: range.clone(),
2895 lsp_action,
2896 })
2897 } else {
2898 None
2899 }
2900 })
2901 .collect())
2902 })
2903 } else if let Some(project_id) = self.remote_id() {
2904 let rpc = self.client.clone();
2905 let version = buffer.version();
2906 cx.spawn_weak(|_, mut cx| async move {
2907 let response = rpc
2908 .request(proto::GetCodeActions {
2909 project_id,
2910 buffer_id,
2911 start: Some(language::proto::serialize_anchor(&range.start)),
2912 end: Some(language::proto::serialize_anchor(&range.end)),
2913 version: serialize_version(&version),
2914 })
2915 .await?;
2916
2917 buffer_handle
2918 .update(&mut cx, |buffer, _| {
2919 buffer.wait_for_version(deserialize_version(response.version))
2920 })
2921 .await;
2922
2923 response
2924 .actions
2925 .into_iter()
2926 .map(language::proto::deserialize_code_action)
2927 .collect()
2928 })
2929 } else {
2930 Task::ready(Ok(Default::default()))
2931 }
2932 }
2933
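    // Applies a code action: the action is resolved (via `codeAction/resolve`
    // when the server provided resolve data, otherwise by re-requesting the
    // actions for its range), then either its workspace edit is applied or its
    // command is executed and any edits the server sends back through
    // `workspace/applyEdit` are returned.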
2934 pub fn apply_code_action(
2935 &self,
2936 buffer_handle: ModelHandle<Buffer>,
2937 mut action: CodeAction,
2938 push_to_history: bool,
2939 cx: &mut ModelContext<Self>,
2940 ) -> Task<Result<ProjectTransaction>> {
2941 if self.is_local() {
2942 let buffer = buffer_handle.read(cx);
2943 let (lsp_adapter, lang_server) =
2944 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2945 server.clone()
2946 } else {
2947 return Task::ready(Ok(Default::default()));
2948 };
2949 let range = action.range.to_point_utf16(buffer);
2950
2951 cx.spawn(|this, mut cx| async move {
2952 if let Some(lsp_range) = action
2953 .lsp_action
2954 .data
2955 .as_mut()
2956 .and_then(|d| d.get_mut("codeActionParams"))
2957 .and_then(|d| d.get_mut("range"))
2958 {
2959 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2960 action.lsp_action = lang_server
2961 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2962 .await?;
2963 } else {
2964 let actions = this
2965 .update(&mut cx, |this, cx| {
2966 this.code_actions(&buffer_handle, action.range, cx)
2967 })
2968 .await?;
2969 action.lsp_action = actions
2970 .into_iter()
2971 .find(|a| a.lsp_action.title == action.lsp_action.title)
2972 .ok_or_else(|| anyhow!("code action is outdated"))?
2973 .lsp_action;
2974 }
2975
2976 if let Some(edit) = action.lsp_action.edit {
2977 Self::deserialize_workspace_edit(
2978 this,
2979 edit,
2980 push_to_history,
2981 lsp_adapter,
2982 lang_server,
2983 &mut cx,
2984 )
2985 .await
2986 } else if let Some(command) = action.lsp_action.command {
2987 this.update(&mut cx, |this, _| {
2988 this.last_workspace_edits_by_language_server
2989 .remove(&lang_server.server_id());
2990 });
2991 lang_server
2992 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2993 command: command.command,
2994 arguments: command.arguments.unwrap_or_default(),
2995 ..Default::default()
2996 })
2997 .await?;
2998 Ok(this.update(&mut cx, |this, _| {
2999 this.last_workspace_edits_by_language_server
3000 .remove(&lang_server.server_id())
3001 .unwrap_or_default()
3002 }))
3003 } else {
3004 Ok(ProjectTransaction::default())
3005 }
3006 })
3007 } else if let Some(project_id) = self.remote_id() {
3008 let client = self.client.clone();
3009 let request = proto::ApplyCodeAction {
3010 project_id,
3011 buffer_id: buffer_handle.read(cx).remote_id(),
3012 action: Some(language::proto::serialize_code_action(&action)),
3013 };
3014 cx.spawn(|this, mut cx| async move {
3015 let response = client
3016 .request(request)
3017 .await?
3018 .transaction
3019 .ok_or_else(|| anyhow!("missing transaction"))?;
3020 this.update(&mut cx, |this, cx| {
3021 this.deserialize_project_transaction(response, push_to_history, cx)
3022 })
3023 .await
3024 })
3025 } else {
3026 Task::ready(Err(anyhow!("project does not have a remote id")))
3027 }
3028 }
3029
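    // Applies an `lsp::WorkspaceEdit` to the project: resource operations
    // (create, rename, delete) go through the filesystem, text edits are applied
    // to buffers, and the per-buffer transactions are gathered into a
    // `ProjectTransaction`.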
3030 async fn deserialize_workspace_edit(
3031 this: ModelHandle<Self>,
3032 edit: lsp::WorkspaceEdit,
3033 push_to_history: bool,
3034 lsp_adapter: Arc<dyn LspAdapter>,
3035 language_server: Arc<LanguageServer>,
3036 cx: &mut AsyncAppContext,
3037 ) -> Result<ProjectTransaction> {
3038 let fs = this.read_with(cx, |this, _| this.fs.clone());
3039 let mut operations = Vec::new();
3040 if let Some(document_changes) = edit.document_changes {
3041 match document_changes {
3042 lsp::DocumentChanges::Edits(edits) => {
3043 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3044 }
3045 lsp::DocumentChanges::Operations(ops) => operations = ops,
3046 }
3047 } else if let Some(changes) = edit.changes {
3048 operations.extend(changes.into_iter().map(|(uri, edits)| {
3049 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3050 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3051 uri,
3052 version: None,
3053 },
3054 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3055 })
3056 }));
3057 }
3058
3059 let mut project_transaction = ProjectTransaction::default();
3060 for operation in operations {
3061 match operation {
3062 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3063 let abs_path = op
3064 .uri
3065 .to_file_path()
3066 .map_err(|_| anyhow!("can't convert URI to path"))?;
3067
3068 if let Some(parent_path) = abs_path.parent() {
3069 fs.create_dir(parent_path).await?;
3070 }
                    // `Path::ends_with` compares whole components and never matches a
                    // trailing "/", so check the URI itself to tell whether this
                    // resource operation refers to a directory.
                    if op.uri.path().ends_with('/') {
3072 fs.create_dir(&abs_path).await?;
3073 } else {
3074 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3075 .await?;
3076 }
3077 }
3078 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3079 let source_abs_path = op
3080 .old_uri
3081 .to_file_path()
3082 .map_err(|_| anyhow!("can't convert URI to path"))?;
3083 let target_abs_path = op
3084 .new_uri
3085 .to_file_path()
3086 .map_err(|_| anyhow!("can't convert URI to path"))?;
3087 fs.rename(
3088 &source_abs_path,
3089 &target_abs_path,
3090 op.options.map(Into::into).unwrap_or_default(),
3091 )
3092 .await?;
3093 }
3094 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3095 let abs_path = op
3096 .uri
3097 .to_file_path()
3098 .map_err(|_| anyhow!("can't convert URI to path"))?;
3099 let options = op.options.map(Into::into).unwrap_or_default();
                    // As above, a trailing slash on the URI indicates a directory.
                    if op.uri.path().ends_with('/') {
3101 fs.remove_dir(&abs_path, options).await?;
3102 } else {
3103 fs.remove_file(&abs_path, options).await?;
3104 }
3105 }
3106 lsp::DocumentChangeOperation::Edit(op) => {
3107 let buffer_to_edit = this
3108 .update(cx, |this, cx| {
3109 this.open_local_buffer_via_lsp(
3110 op.text_document.uri,
3111 lsp_adapter.clone(),
3112 language_server.clone(),
3113 cx,
3114 )
3115 })
3116 .await?;
3117
3118 let edits = this
3119 .update(cx, |this, cx| {
3120 let edits = op.edits.into_iter().map(|edit| match edit {
3121 lsp::OneOf::Left(edit) => edit,
3122 lsp::OneOf::Right(edit) => edit.text_edit,
3123 });
3124 this.edits_from_lsp(
3125 &buffer_to_edit,
3126 edits,
3127 op.text_document.version,
3128 cx,
3129 )
3130 })
3131 .await?;
3132
3133 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3134 buffer.finalize_last_transaction();
3135 buffer.start_transaction();
3136 for (range, text) in edits {
3137 buffer.edit([(range, text)], cx);
3138 }
3139 let transaction = if buffer.end_transaction(cx).is_some() {
3140 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3141 if !push_to_history {
3142 buffer.forget_transaction(transaction.id);
3143 }
3144 Some(transaction)
3145 } else {
3146 None
3147 };
3148
3149 transaction
3150 });
3151 if let Some(transaction) = transaction {
3152 project_transaction.0.insert(buffer_to_edit, transaction);
3153 }
3154 }
3155 }
3156 }
3157
3158 Ok(project_transaction)
3159 }
3160
3161 pub fn prepare_rename<T: ToPointUtf16>(
3162 &self,
3163 buffer: ModelHandle<Buffer>,
3164 position: T,
3165 cx: &mut ModelContext<Self>,
3166 ) -> Task<Result<Option<Range<Anchor>>>> {
3167 let position = position.to_point_utf16(buffer.read(cx));
3168 self.request_lsp(buffer, PrepareRename { position }, cx)
3169 }
3170
3171 pub fn perform_rename<T: ToPointUtf16>(
3172 &self,
3173 buffer: ModelHandle<Buffer>,
3174 position: T,
3175 new_name: String,
3176 push_to_history: bool,
3177 cx: &mut ModelContext<Self>,
3178 ) -> Task<Result<ProjectTransaction>> {
3179 let position = position.to_point_utf16(buffer.read(cx));
3180 self.request_lsp(
3181 buffer,
3182 PerformRename {
3183 position,
3184 new_name,
3185 push_to_history,
3186 },
3187 cx,
3188 )
3189 }
3190
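    // Searches the project for the given query. Locally, worker tasks scan the
    // visible files for candidate matches, matching files are opened as buffers,
    // and the buffer contents (including already-open buffers) are searched on
    // background threads; on remote projects the search runs on the host.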
3191 pub fn search(
3192 &self,
3193 query: SearchQuery,
3194 cx: &mut ModelContext<Self>,
3195 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3196 if self.is_local() {
3197 let snapshots = self
3198 .visible_worktrees(cx)
3199 .filter_map(|tree| {
3200 let tree = tree.read(cx).as_local()?;
3201 Some(tree.snapshot())
3202 })
3203 .collect::<Vec<_>>();
3204
3205 let background = cx.background().clone();
3206 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3207 if path_count == 0 {
3208 return Task::ready(Ok(Default::default()));
3209 }
3210 let workers = background.num_cpus().min(path_count);
3211 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3212 cx.background()
3213 .spawn({
3214 let fs = self.fs.clone();
3215 let background = cx.background().clone();
3216 let query = query.clone();
3217 async move {
3218 let fs = &fs;
3219 let query = &query;
3220 let matching_paths_tx = &matching_paths_tx;
3221 let paths_per_worker = (path_count + workers - 1) / workers;
3222 let snapshots = &snapshots;
3223 background
3224 .scoped(|scope| {
3225 for worker_ix in 0..workers {
3226 let worker_start_ix = worker_ix * paths_per_worker;
3227 let worker_end_ix = worker_start_ix + paths_per_worker;
3228 scope.spawn(async move {
3229 let mut snapshot_start_ix = 0;
3230 let mut abs_path = PathBuf::new();
3231 for snapshot in snapshots {
3232 let snapshot_end_ix =
3233 snapshot_start_ix + snapshot.visible_file_count();
3234 if worker_end_ix <= snapshot_start_ix {
3235 break;
3236 } else if worker_start_ix > snapshot_end_ix {
3237 snapshot_start_ix = snapshot_end_ix;
3238 continue;
3239 } else {
3240 let start_in_snapshot = worker_start_ix
3241 .saturating_sub(snapshot_start_ix);
3242 let end_in_snapshot =
3243 cmp::min(worker_end_ix, snapshot_end_ix)
3244 - snapshot_start_ix;
3245
3246 for entry in snapshot
3247 .files(false, start_in_snapshot)
3248 .take(end_in_snapshot - start_in_snapshot)
3249 {
3250 if matching_paths_tx.is_closed() {
3251 break;
3252 }
3253
3254 abs_path.clear();
3255 abs_path.push(&snapshot.abs_path());
3256 abs_path.push(&entry.path);
3257 let matches = if let Some(file) =
3258 fs.open_sync(&abs_path).await.log_err()
3259 {
3260 query.detect(file).unwrap_or(false)
3261 } else {
3262 false
3263 };
3264
3265 if matches {
3266 let project_path =
3267 (snapshot.id(), entry.path.clone());
3268 if matching_paths_tx
3269 .send(project_path)
3270 .await
3271 .is_err()
3272 {
3273 break;
3274 }
3275 }
3276 }
3277
3278 snapshot_start_ix = snapshot_end_ix;
3279 }
3280 }
3281 });
3282 }
3283 })
3284 .await;
3285 }
3286 })
3287 .detach();
3288
3289 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3290 let open_buffers = self
3291 .opened_buffers
3292 .values()
3293 .filter_map(|b| b.upgrade(cx))
3294 .collect::<HashSet<_>>();
3295 cx.spawn(|this, cx| async move {
3296 for buffer in &open_buffers {
3297 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3298 buffers_tx.send((buffer.clone(), snapshot)).await?;
3299 }
3300
3301 let open_buffers = Rc::new(RefCell::new(open_buffers));
3302 while let Some(project_path) = matching_paths_rx.next().await {
3303 if buffers_tx.is_closed() {
3304 break;
3305 }
3306
3307 let this = this.clone();
3308 let open_buffers = open_buffers.clone();
3309 let buffers_tx = buffers_tx.clone();
3310 cx.spawn(|mut cx| async move {
3311 if let Some(buffer) = this
3312 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3313 .await
3314 .log_err()
3315 {
3316 if open_buffers.borrow_mut().insert(buffer.clone()) {
3317 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3318 buffers_tx.send((buffer, snapshot)).await?;
3319 }
3320 }
3321
3322 Ok::<_, anyhow::Error>(())
3323 })
3324 .detach();
3325 }
3326
3327 Ok::<_, anyhow::Error>(())
3328 })
3329 .detach_and_log_err(cx);
3330
3331 let background = cx.background().clone();
3332 cx.background().spawn(async move {
3333 let query = &query;
3334 let mut matched_buffers = Vec::new();
3335 for _ in 0..workers {
3336 matched_buffers.push(HashMap::default());
3337 }
3338 background
3339 .scoped(|scope| {
3340 for worker_matched_buffers in matched_buffers.iter_mut() {
3341 let mut buffers_rx = buffers_rx.clone();
3342 scope.spawn(async move {
3343 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3344 let buffer_matches = query
3345 .search(snapshot.as_rope())
3346 .await
3347 .iter()
3348 .map(|range| {
3349 snapshot.anchor_before(range.start)
3350 ..snapshot.anchor_after(range.end)
3351 })
3352 .collect::<Vec<_>>();
3353 if !buffer_matches.is_empty() {
3354 worker_matched_buffers
3355 .insert(buffer.clone(), buffer_matches);
3356 }
3357 }
3358 });
3359 }
3360 })
3361 .await;
3362 Ok(matched_buffers.into_iter().flatten().collect())
3363 })
3364 } else if let Some(project_id) = self.remote_id() {
3365 let request = self.client.request(query.to_proto(project_id));
3366 cx.spawn(|this, mut cx| async move {
3367 let response = request.await?;
3368 let mut result = HashMap::default();
3369 for location in response.locations {
3370 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3371 let target_buffer = this
3372 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3373 .await?;
3374 let start = location
3375 .start
3376 .and_then(deserialize_anchor)
3377 .ok_or_else(|| anyhow!("missing target start"))?;
3378 let end = location
3379 .end
3380 .and_then(deserialize_anchor)
3381 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
3386 }
3387 Ok(result)
3388 })
3389 } else {
3390 Task::ready(Ok(Default::default()))
3391 }
3392 }
3393
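    // Dispatches a typed LSP command either directly to the buffer's language
    // server (for local projects) or to the host over RPC (for remote projects).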
3394 fn request_lsp<R: LspCommand>(
3395 &self,
3396 buffer_handle: ModelHandle<Buffer>,
3397 request: R,
3398 cx: &mut ModelContext<Self>,
3399 ) -> Task<Result<R::Response>>
3400 where
3401 <R::LspRequest as lsp::request::Request>::Result: Send,
3402 {
3403 let buffer = buffer_handle.read(cx);
3404 if self.is_local() {
3405 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3406 if let Some((file, (_, language_server))) =
3407 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3408 {
3409 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3410 return cx.spawn(|this, cx| async move {
3411 if !request.check_capabilities(&language_server.capabilities()) {
3412 return Ok(Default::default());
3413 }
3414
3415 let response = language_server
3416 .request::<R::LspRequest>(lsp_params)
3417 .await
3418 .context("lsp request failed")?;
3419 request
3420 .response_from_lsp(response, this, buffer_handle, cx)
3421 .await
3422 });
3423 }
3424 } else if let Some(project_id) = self.remote_id() {
3425 let rpc = self.client.clone();
3426 let message = request.to_proto(project_id, buffer);
3427 return cx.spawn(|this, cx| async move {
3428 let response = rpc.request(message).await?;
3429 request
3430 .response_from_proto(response, this, buffer_handle, cx)
3431 .await
3432 });
3433 }
3434 Task::ready(Ok(Default::default()))
3435 }
3436
3437 pub fn find_or_create_local_worktree(
3438 &mut self,
3439 abs_path: impl AsRef<Path>,
3440 visible: bool,
3441 cx: &mut ModelContext<Self>,
3442 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3443 let abs_path = abs_path.as_ref();
3444 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3445 Task::ready(Ok((tree.clone(), relative_path.into())))
3446 } else {
3447 let worktree = self.create_local_worktree(abs_path, visible, cx);
3448 cx.foreground()
3449 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3450 }
3451 }
3452
3453 pub fn find_local_worktree(
3454 &self,
3455 abs_path: &Path,
3456 cx: &AppContext,
3457 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3458 for tree in self.worktrees(cx) {
3459 if let Some(relative_path) = tree
3460 .read(cx)
3461 .as_local()
3462 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3463 {
3464 return Some((tree.clone(), relative_path.into()));
3465 }
3466 }
3467 None
3468 }
3469
3470 pub fn is_shared(&self) -> bool {
3471 match &self.client_state {
3472 ProjectClientState::Local { is_shared, .. } => *is_shared,
3473 ProjectClientState::Remote { .. } => false,
3474 }
3475 }
3476
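    // Creates a local worktree for the given path, deduplicating concurrent
    // requests via `loading_local_worktrees` and registering or sharing the new
    // worktree with the server when the project has a remote id.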
3477 fn create_local_worktree(
3478 &mut self,
3479 abs_path: impl AsRef<Path>,
3480 visible: bool,
3481 cx: &mut ModelContext<Self>,
3482 ) -> Task<Result<ModelHandle<Worktree>>> {
3483 let fs = self.fs.clone();
3484 let client = self.client.clone();
3485 let next_entry_id = self.next_entry_id.clone();
3486 let path: Arc<Path> = abs_path.as_ref().into();
3487 let task = self
3488 .loading_local_worktrees
3489 .entry(path.clone())
3490 .or_insert_with(|| {
3491 cx.spawn(|project, mut cx| {
3492 async move {
3493 let worktree = Worktree::local(
3494 client.clone(),
3495 path.clone(),
3496 visible,
3497 fs,
3498 next_entry_id,
3499 &mut cx,
3500 )
3501 .await;
3502 project.update(&mut cx, |project, _| {
3503 project.loading_local_worktrees.remove(&path);
3504 });
3505 let worktree = worktree?;
3506
3507 let (remote_project_id, is_shared) =
3508 project.update(&mut cx, |project, cx| {
3509 project.add_worktree(&worktree, cx);
3510 (project.remote_id(), project.is_shared())
3511 });
3512
3513 if let Some(project_id) = remote_project_id {
3514 if is_shared {
3515 worktree
3516 .update(&mut cx, |worktree, cx| {
3517 worktree.as_local_mut().unwrap().share(project_id, cx)
3518 })
3519 .await?;
3520 } else {
3521 worktree
3522 .update(&mut cx, |worktree, cx| {
3523 worktree.as_local_mut().unwrap().register(project_id, cx)
3524 })
3525 .await?;
3526 }
3527 }
3528
3529 Ok(worktree)
3530 }
                    .map_err(Arc::new)
3532 })
3533 .shared()
3534 })
3535 .clone();
3536 cx.foreground().spawn(async move {
3537 match task.await {
3538 Ok(worktree) => Ok(worktree),
3539 Err(err) => Err(anyhow!("{}", err)),
3540 }
3541 })
3542 }
3543
3544 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3545 self.worktrees.retain(|worktree| {
3546 worktree
3547 .upgrade(cx)
3548 .map_or(false, |w| w.read(cx).id() != id)
3549 });
3550 cx.notify();
3551 }
3552
3553 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3554 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3555 if worktree.read(cx).is_local() {
3556 cx.subscribe(&worktree, |this, worktree, _, cx| {
3557 this.update_local_worktree_buffers(worktree, cx);
3558 })
3559 .detach();
3560 }
3561
3562 let push_strong_handle = {
3563 let worktree = worktree.read(cx);
3564 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3565 };
3566 if push_strong_handle {
3567 self.worktrees
3568 .push(WorktreeHandle::Strong(worktree.clone()));
3569 } else {
3570 cx.observe_release(&worktree, |this, _, cx| {
3571 this.worktrees
3572 .retain(|worktree| worktree.upgrade(cx).is_some());
3573 cx.notify();
3574 })
3575 .detach();
3576 self.worktrees
3577 .push(WorktreeHandle::Weak(worktree.downgrade()));
3578 }
3579 cx.notify();
3580 }
3581
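    // After a worktree updates, recomputes the `File` for every open buffer in
    // that worktree, notifies collaborators of the changes, and re-registers
    // renamed buffers with their language servers.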
3582 fn update_local_worktree_buffers(
3583 &mut self,
3584 worktree_handle: ModelHandle<Worktree>,
3585 cx: &mut ModelContext<Self>,
3586 ) {
3587 let snapshot = worktree_handle.read(cx).snapshot();
3588 let mut buffers_to_delete = Vec::new();
3589 let mut renamed_buffers = Vec::new();
3590 for (buffer_id, buffer) in &self.opened_buffers {
3591 if let Some(buffer) = buffer.upgrade(cx) {
3592 buffer.update(cx, |buffer, cx| {
3593 if let Some(old_file) = File::from_dyn(buffer.file()) {
3594 if old_file.worktree != worktree_handle {
3595 return;
3596 }
3597
3598 let new_file = if let Some(entry) = old_file
3599 .entry_id
3600 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3601 {
3602 File {
3603 is_local: true,
3604 entry_id: Some(entry.id),
3605 mtime: entry.mtime,
3606 path: entry.path.clone(),
3607 worktree: worktree_handle.clone(),
3608 }
3609 } else if let Some(entry) =
3610 snapshot.entry_for_path(old_file.path().as_ref())
3611 {
3612 File {
3613 is_local: true,
3614 entry_id: Some(entry.id),
3615 mtime: entry.mtime,
3616 path: entry.path.clone(),
3617 worktree: worktree_handle.clone(),
3618 }
3619 } else {
3620 File {
3621 is_local: true,
3622 entry_id: None,
3623 path: old_file.path().clone(),
3624 mtime: old_file.mtime(),
3625 worktree: worktree_handle.clone(),
3626 }
3627 };
3628
3629 let old_path = old_file.abs_path(cx);
3630 if new_file.abs_path(cx) != old_path {
3631 renamed_buffers.push((cx.handle(), old_path));
3632 }
3633
3634 if let Some(project_id) = self.remote_id() {
3635 self.client
3636 .send(proto::UpdateBufferFile {
3637 project_id,
3638 buffer_id: *buffer_id as u64,
3639 file: Some(new_file.to_proto()),
3640 })
3641 .log_err();
3642 }
3643 buffer.file_updated(Box::new(new_file), cx).detach();
3644 }
3645 });
3646 } else {
3647 buffers_to_delete.push(*buffer_id);
3648 }
3649 }
3650
3651 for buffer_id in buffers_to_delete {
3652 self.opened_buffers.remove(&buffer_id);
3653 }
3654
3655 for (buffer, old_path) in renamed_buffers {
3656 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3657 self.assign_language_to_buffer(&buffer, cx);
3658 self.register_buffer_with_language_server(&buffer, cx);
3659 }
3660 }
3661
3662 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3663 let new_active_entry = entry.and_then(|project_path| {
3664 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3665 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3666 Some(entry.id)
3667 });
3668 if new_active_entry != self.active_entry {
3669 self.active_entry = new_active_entry;
3670 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3671 }
3672 }
3673
3674 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3675 self.language_server_statuses
3676 .values()
3677 .any(|status| status.pending_diagnostic_updates > 0)
3678 }
3679
3680 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3681 let mut summary = DiagnosticSummary::default();
3682 for (_, path_summary) in self.diagnostic_summaries(cx) {
3683 summary.error_count += path_summary.error_count;
3684 summary.warning_count += path_summary.warning_count;
3685 }
3686 summary
3687 }
3688
3689 pub fn diagnostic_summaries<'a>(
3690 &'a self,
3691 cx: &'a AppContext,
3692 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3693 self.worktrees(cx).flat_map(move |worktree| {
3694 let worktree = worktree.read(cx);
3695 let worktree_id = worktree.id();
3696 worktree
3697 .diagnostic_summaries()
3698 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3699 })
3700 }
3701
3702 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3703 if self
3704 .language_server_statuses
3705 .values()
3706 .map(|status| status.pending_diagnostic_updates)
3707 .sum::<isize>()
3708 == 1
3709 {
3710 cx.emit(Event::DiskBasedDiagnosticsStarted);
3711 }
3712 }
3713
3714 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3715 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3716 if self
3717 .language_server_statuses
3718 .values()
3719 .map(|status| status.pending_diagnostic_updates)
3720 .sum::<isize>()
3721 == 0
3722 {
3723 cx.emit(Event::DiskBasedDiagnosticsFinished);
3724 }
3725 }
3726
3727 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3728 self.active_entry
3729 }
3730
3731 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3732 self.worktree_for_id(path.worktree_id, cx)?
3733 .read(cx)
3734 .entry_for_path(&path.path)
3735 .map(|entry| entry.id)
3736 }
3737
3738 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3739 let worktree = self.worktree_for_entry(entry_id, cx)?;
3740 let worktree = worktree.read(cx);
3741 let worktree_id = worktree.id();
3742 let path = worktree.entry_for_id(entry_id)?.path.clone();
3743 Some(ProjectPath { worktree_id, path })
3744 }
3745
3746 // RPC message handlers
3747
3748 async fn handle_unshare_project(
3749 this: ModelHandle<Self>,
3750 _: TypedEnvelope<proto::UnshareProject>,
3751 _: Arc<Client>,
3752 mut cx: AsyncAppContext,
3753 ) -> Result<()> {
3754 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3755 Ok(())
3756 }
3757
3758 async fn handle_add_collaborator(
3759 this: ModelHandle<Self>,
3760 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3761 _: Arc<Client>,
3762 mut cx: AsyncAppContext,
3763 ) -> Result<()> {
3764 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3765 let collaborator = envelope
3766 .payload
3767 .collaborator
3768 .take()
3769 .ok_or_else(|| anyhow!("empty collaborator"))?;
3770
3771 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3772 this.update(&mut cx, |this, cx| {
3773 this.collaborators
3774 .insert(collaborator.peer_id, collaborator);
3775 cx.notify();
3776 });
3777
3778 Ok(())
3779 }
3780
3781 async fn handle_remove_collaborator(
3782 this: ModelHandle<Self>,
3783 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3784 _: Arc<Client>,
3785 mut cx: AsyncAppContext,
3786 ) -> Result<()> {
3787 this.update(&mut cx, |this, cx| {
3788 let peer_id = PeerId(envelope.payload.peer_id);
3789 let replica_id = this
3790 .collaborators
3791 .remove(&peer_id)
3792 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3793 .replica_id;
3794 for (_, buffer) in &this.opened_buffers {
3795 if let Some(buffer) = buffer.upgrade(cx) {
3796 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3797 }
3798 }
3799 cx.emit(Event::CollaboratorLeft(peer_id));
3800 cx.notify();
3801 Ok(())
3802 })
3803 }
3804
3805 async fn handle_register_worktree(
3806 this: ModelHandle<Self>,
3807 envelope: TypedEnvelope<proto::RegisterWorktree>,
3808 client: Arc<Client>,
3809 mut cx: AsyncAppContext,
3810 ) -> Result<()> {
3811 this.update(&mut cx, |this, cx| {
3812 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3813 let replica_id = this.replica_id();
3814 let worktree = proto::Worktree {
3815 id: envelope.payload.worktree_id,
3816 root_name: envelope.payload.root_name,
3817 entries: Default::default(),
3818 diagnostic_summaries: Default::default(),
3819 visible: envelope.payload.visible,
3820 scan_id: 0,
3821 };
3822 let (worktree, load_task) =
3823 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3824 this.add_worktree(&worktree, cx);
3825 load_task.detach();
3826 Ok(())
3827 })
3828 }
3829
3830 async fn handle_unregister_worktree(
3831 this: ModelHandle<Self>,
3832 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3833 _: Arc<Client>,
3834 mut cx: AsyncAppContext,
3835 ) -> Result<()> {
3836 this.update(&mut cx, |this, cx| {
3837 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3838 this.remove_worktree(worktree_id, cx);
3839 Ok(())
3840 })
3841 }
3842
3843 async fn handle_update_worktree(
3844 this: ModelHandle<Self>,
3845 envelope: TypedEnvelope<proto::UpdateWorktree>,
3846 _: Arc<Client>,
3847 mut cx: AsyncAppContext,
3848 ) -> Result<()> {
3849 this.update(&mut cx, |this, cx| {
3850 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3851 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3852 worktree.update(cx, |worktree, _| {
3853 let worktree = worktree.as_remote_mut().unwrap();
3854 worktree.update_from_remote(envelope)
3855 })?;
3856 }
3857 Ok(())
3858 })
3859 }
3860
3861 async fn handle_create_project_entry(
3862 this: ModelHandle<Self>,
3863 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3864 _: Arc<Client>,
3865 mut cx: AsyncAppContext,
3866 ) -> Result<proto::ProjectEntryResponse> {
3867 let worktree = this.update(&mut cx, |this, cx| {
3868 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3869 this.worktree_for_id(worktree_id, cx)
3870 .ok_or_else(|| anyhow!("worktree not found"))
3871 })?;
3872 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3873 let entry = worktree
3874 .update(&mut cx, |worktree, cx| {
3875 let worktree = worktree.as_local_mut().unwrap();
3876 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3877 worktree.create_entry(path, envelope.payload.is_directory, cx)
3878 })
3879 .await?;
3880 Ok(proto::ProjectEntryResponse {
3881 entry: Some((&entry).into()),
3882 worktree_scan_id: worktree_scan_id as u64,
3883 })
3884 }
3885
3886 async fn handle_rename_project_entry(
3887 this: ModelHandle<Self>,
3888 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3889 _: Arc<Client>,
3890 mut cx: AsyncAppContext,
3891 ) -> Result<proto::ProjectEntryResponse> {
3892 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3893 let worktree = this.read_with(&cx, |this, cx| {
3894 this.worktree_for_entry(entry_id, cx)
3895 .ok_or_else(|| anyhow!("worktree not found"))
3896 })?;
3897 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3898 let entry = worktree
3899 .update(&mut cx, |worktree, cx| {
3900 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
3901 worktree
3902 .as_local_mut()
3903 .unwrap()
3904 .rename_entry(entry_id, new_path, cx)
3905 .ok_or_else(|| anyhow!("invalid entry"))
3906 })?
3907 .await?;
3908 Ok(proto::ProjectEntryResponse {
3909 entry: Some((&entry).into()),
3910 worktree_scan_id: worktree_scan_id as u64,
3911 })
3912 }
3913
3914 async fn handle_delete_project_entry(
3915 this: ModelHandle<Self>,
3916 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
3917 _: Arc<Client>,
3918 mut cx: AsyncAppContext,
3919 ) -> Result<proto::ProjectEntryResponse> {
3920 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3921 let worktree = this.read_with(&cx, |this, cx| {
3922 this.worktree_for_entry(entry_id, cx)
3923 .ok_or_else(|| anyhow!("worktree not found"))
3924 })?;
3925 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3926 worktree
3927 .update(&mut cx, |worktree, cx| {
3928 worktree
3929 .as_local_mut()
3930 .unwrap()
3931 .delete_entry(entry_id, cx)
3932 .ok_or_else(|| anyhow!("invalid entry"))
3933 })?
3934 .await?;
3935 Ok(proto::ProjectEntryResponse {
3936 entry: None,
3937 worktree_scan_id: worktree_scan_id as u64,
3938 })
3939 }
3940
3941 async fn handle_update_diagnostic_summary(
3942 this: ModelHandle<Self>,
3943 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3944 _: Arc<Client>,
3945 mut cx: AsyncAppContext,
3946 ) -> Result<()> {
3947 this.update(&mut cx, |this, cx| {
3948 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3949 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3950 if let Some(summary) = envelope.payload.summary {
3951 let project_path = ProjectPath {
3952 worktree_id,
3953 path: Path::new(&summary.path).into(),
3954 };
3955 worktree.update(cx, |worktree, _| {
3956 worktree
3957 .as_remote_mut()
3958 .unwrap()
3959 .update_diagnostic_summary(project_path.path.clone(), &summary);
3960 });
3961 cx.emit(Event::DiagnosticsUpdated(project_path));
3962 }
3963 }
3964 Ok(())
3965 })
3966 }
3967
3968 async fn handle_start_language_server(
3969 this: ModelHandle<Self>,
3970 envelope: TypedEnvelope<proto::StartLanguageServer>,
3971 _: Arc<Client>,
3972 mut cx: AsyncAppContext,
3973 ) -> Result<()> {
3974 let server = envelope
3975 .payload
3976 .server
3977 .ok_or_else(|| anyhow!("invalid server"))?;
3978 this.update(&mut cx, |this, cx| {
3979 this.language_server_statuses.insert(
3980 server.id as usize,
3981 LanguageServerStatus {
3982 name: server.name,
3983 pending_work: Default::default(),
3984 pending_diagnostic_updates: 0,
3985 },
3986 );
3987 cx.notify();
3988 });
3989 Ok(())
3990 }
3991
3992 async fn handle_update_language_server(
3993 this: ModelHandle<Self>,
3994 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3995 _: Arc<Client>,
3996 mut cx: AsyncAppContext,
3997 ) -> Result<()> {
3998 let language_server_id = envelope.payload.language_server_id as usize;
3999 match envelope
4000 .payload
4001 .variant
4002 .ok_or_else(|| anyhow!("invalid variant"))?
4003 {
4004 proto::update_language_server::Variant::WorkStart(payload) => {
4005 this.update(&mut cx, |this, cx| {
4006 this.on_lsp_work_start(language_server_id, payload.token, cx);
4007 })
4008 }
4009 proto::update_language_server::Variant::WorkProgress(payload) => {
4010 this.update(&mut cx, |this, cx| {
4011 this.on_lsp_work_progress(
4012 language_server_id,
4013 payload.token,
4014 LanguageServerProgress {
4015 message: payload.message,
4016 percentage: payload.percentage.map(|p| p as usize),
4017 last_update_at: Instant::now(),
4018 },
4019 cx,
4020 );
4021 })
4022 }
4023 proto::update_language_server::Variant::WorkEnd(payload) => {
4024 this.update(&mut cx, |this, cx| {
4025 this.on_lsp_work_end(language_server_id, payload.token, cx);
4026 })
4027 }
4028 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4029 this.update(&mut cx, |this, cx| {
4030 this.disk_based_diagnostics_started(cx);
4031 })
4032 }
4033 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4034 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4035 }
4036 }
4037
4038 Ok(())
4039 }
4040
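// Applies operations received for a buffer. If the buffer has not finished opening yet, the
// operations are queued on an `OpenBuffer::Loading` entry so they can be applied once it is
// available; operations for weakly-held buffers are dropped.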
4041 async fn handle_update_buffer(
4042 this: ModelHandle<Self>,
4043 envelope: TypedEnvelope<proto::UpdateBuffer>,
4044 _: Arc<Client>,
4045 mut cx: AsyncAppContext,
4046 ) -> Result<()> {
4047 this.update(&mut cx, |this, cx| {
4048 let payload = envelope.payload.clone();
4049 let buffer_id = payload.buffer_id;
4050 let ops = payload
4051 .operations
4052 .into_iter()
4053 .map(language::proto::deserialize_operation)
4054 .collect::<Result<Vec<_>, _>>()?;
4055 match this.opened_buffers.entry(buffer_id) {
4056 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4057 OpenBuffer::Strong(buffer) => {
4058 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4059 }
4060 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4061 OpenBuffer::Weak(_) => {}
4062 },
4063 hash_map::Entry::Vacant(e) => {
4064 e.insert(OpenBuffer::Loading(ops));
4065 }
4066 }
4067 Ok(())
4068 })
4069 }
4070
4071 async fn handle_update_buffer_file(
4072 this: ModelHandle<Self>,
4073 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4074 _: Arc<Client>,
4075 mut cx: AsyncAppContext,
4076 ) -> Result<()> {
4077 this.update(&mut cx, |this, cx| {
4078 let payload = envelope.payload.clone();
4079 let buffer_id = payload.buffer_id;
4080 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4081 let worktree = this
4082 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4083 .ok_or_else(|| anyhow!("no such worktree"))?;
4084 let file = File::from_proto(file, worktree.clone(), cx)?;
4085 let buffer = this
4086 .opened_buffers
4087 .get_mut(&buffer_id)
4088 .and_then(|b| b.upgrade(cx))
4089 .ok_or_else(|| anyhow!("no such buffer"))?;
4090 buffer.update(cx, |buffer, cx| {
4091 buffer.file_updated(Box::new(file), cx).detach();
4092 });
4093 Ok(())
4094 })
4095 }
4096
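// Saves a buffer on behalf of a peer, waiting until the buffer has caught up to the version the
// peer requested before writing it to disk.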
4097 async fn handle_save_buffer(
4098 this: ModelHandle<Self>,
4099 envelope: TypedEnvelope<proto::SaveBuffer>,
4100 _: Arc<Client>,
4101 mut cx: AsyncAppContext,
4102 ) -> Result<proto::BufferSaved> {
4103 let buffer_id = envelope.payload.buffer_id;
4104 let requested_version = deserialize_version(envelope.payload.version);
4105
4106 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4107 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4108 let buffer = this
4109 .opened_buffers
4110 .get(&buffer_id)
4111 .and_then(|buffer| buffer.upgrade(cx))
4112 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4113 Ok::<_, anyhow::Error>((project_id, buffer))
4114 })?;
4115 buffer
4116 .update(&mut cx, |buffer, _| {
4117 buffer.wait_for_version(requested_version)
4118 })
4119 .await;
4120
4121 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4122 Ok(proto::BufferSaved {
4123 project_id,
4124 buffer_id,
4125 version: serialize_version(&saved_version),
4126 mtime: Some(mtime.into()),
4127 })
4128 }
4129
4130 async fn handle_reload_buffers(
4131 this: ModelHandle<Self>,
4132 envelope: TypedEnvelope<proto::ReloadBuffers>,
4133 _: Arc<Client>,
4134 mut cx: AsyncAppContext,
4135 ) -> Result<proto::ReloadBuffersResponse> {
4136 let sender_id = envelope.original_sender_id()?;
4137 let reload = this.update(&mut cx, |this, cx| {
4138 let mut buffers = HashSet::default();
4139 for buffer_id in &envelope.payload.buffer_ids {
4140 buffers.insert(
4141 this.opened_buffers
4142 .get(buffer_id)
4143 .and_then(|buffer| buffer.upgrade(cx))
4144 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4145 );
4146 }
4147 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4148 })?;
4149
4150 let project_transaction = reload.await?;
4151 let project_transaction = this.update(&mut cx, |this, cx| {
4152 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4153 });
4154 Ok(proto::ReloadBuffersResponse {
4155 transaction: Some(project_transaction),
4156 })
4157 }
4158
4159 async fn handle_format_buffers(
4160 this: ModelHandle<Self>,
4161 envelope: TypedEnvelope<proto::FormatBuffers>,
4162 _: Arc<Client>,
4163 mut cx: AsyncAppContext,
4164 ) -> Result<proto::FormatBuffersResponse> {
4165 let sender_id = envelope.original_sender_id()?;
4166 let format = this.update(&mut cx, |this, cx| {
4167 let mut buffers = HashSet::default();
4168 for buffer_id in &envelope.payload.buffer_ids {
4169 buffers.insert(
4170 this.opened_buffers
4171 .get(buffer_id)
4172 .and_then(|buffer| buffer.upgrade(cx))
4173 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4174 );
4175 }
4176 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4177 })?;
4178
4179 let project_transaction = format.await?;
4180 let project_transaction = this.update(&mut cx, |this, cx| {
4181 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4182 });
4183 Ok(proto::FormatBuffersResponse {
4184 transaction: Some(project_transaction),
4185 })
4186 }
4187
4188 async fn handle_get_completions(
4189 this: ModelHandle<Self>,
4190 envelope: TypedEnvelope<proto::GetCompletions>,
4191 _: Arc<Client>,
4192 mut cx: AsyncAppContext,
4193 ) -> Result<proto::GetCompletionsResponse> {
4194 let position = envelope
4195 .payload
4196 .position
4197 .and_then(language::proto::deserialize_anchor)
4198 .ok_or_else(|| anyhow!("invalid position"))?;
4199 let version = deserialize_version(envelope.payload.version);
4200 let buffer = this.read_with(&cx, |this, cx| {
4201 this.opened_buffers
4202 .get(&envelope.payload.buffer_id)
4203 .and_then(|buffer| buffer.upgrade(cx))
4204 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4205 })?;
4206 buffer
4207 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4208 .await;
4209 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4210 let completions = this
4211 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4212 .await?;
4213
4214 Ok(proto::GetCompletionsResponse {
4215 completions: completions
4216 .iter()
4217 .map(language::proto::serialize_completion)
4218 .collect(),
4219 version: serialize_version(&version),
4220 })
4221 }
4222
4223 async fn handle_apply_additional_edits_for_completion(
4224 this: ModelHandle<Self>,
4225 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4226 _: Arc<Client>,
4227 mut cx: AsyncAppContext,
4228 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4229 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4230 let buffer = this
4231 .opened_buffers
4232 .get(&envelope.payload.buffer_id)
4233 .and_then(|buffer| buffer.upgrade(cx))
4234 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4235 let language = buffer.read(cx).language();
4236 let completion = language::proto::deserialize_completion(
4237 envelope
4238 .payload
4239 .completion
4240 .ok_or_else(|| anyhow!("invalid completion"))?,
4241 language,
4242 )?;
4243 Ok::<_, anyhow::Error>(
4244 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4245 )
4246 })?;
4247
4248 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4249 transaction: apply_additional_edits
4250 .await?
4251 .as_ref()
4252 .map(language::proto::serialize_transaction),
4253 })
4254 }
4255
4256 async fn handle_get_code_actions(
4257 this: ModelHandle<Self>,
4258 envelope: TypedEnvelope<proto::GetCodeActions>,
4259 _: Arc<Client>,
4260 mut cx: AsyncAppContext,
4261 ) -> Result<proto::GetCodeActionsResponse> {
4262 let start = envelope
4263 .payload
4264 .start
4265 .and_then(language::proto::deserialize_anchor)
4266 .ok_or_else(|| anyhow!("invalid start"))?;
4267 let end = envelope
4268 .payload
4269 .end
4270 .and_then(language::proto::deserialize_anchor)
4271 .ok_or_else(|| anyhow!("invalid end"))?;
4272 let buffer = this.update(&mut cx, |this, cx| {
4273 this.opened_buffers
4274 .get(&envelope.payload.buffer_id)
4275 .and_then(|buffer| buffer.upgrade(cx))
4276 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4277 })?;
4278 buffer
4279 .update(&mut cx, |buffer, _| {
4280 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4281 })
4282 .await;
4283
4284 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4285 let code_actions = this.update(&mut cx, |this, cx| {
4286 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4287 })?;
4288
4289 Ok(proto::GetCodeActionsResponse {
4290 actions: code_actions
4291 .await?
4292 .iter()
4293 .map(language::proto::serialize_code_action)
4294 .collect(),
4295 version: serialize_version(&version),
4296 })
4297 }
4298
4299 async fn handle_apply_code_action(
4300 this: ModelHandle<Self>,
4301 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4302 _: Arc<Client>,
4303 mut cx: AsyncAppContext,
4304 ) -> Result<proto::ApplyCodeActionResponse> {
4305 let sender_id = envelope.original_sender_id()?;
4306 let action = language::proto::deserialize_code_action(
4307 envelope
4308 .payload
4309 .action
4310 .ok_or_else(|| anyhow!("invalid action"))?,
4311 )?;
4312 let apply_code_action = this.update(&mut cx, |this, cx| {
4313 let buffer = this
4314 .opened_buffers
4315 .get(&envelope.payload.buffer_id)
4316 .and_then(|buffer| buffer.upgrade(cx))
4317 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4318 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4319 })?;
4320
4321 let project_transaction = apply_code_action.await?;
4322 let project_transaction = this.update(&mut cx, |this, cx| {
4323 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4324 });
4325 Ok(proto::ApplyCodeActionResponse {
4326 transaction: Some(project_transaction),
4327 })
4328 }
4329
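// Generic handler for LSP-backed requests forwarded by peers: the proto request is deserialized
// into an `LspCommand`, run against the local language server via `request_lsp`, and the result
// is serialized back for the original sender.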
4330 async fn handle_lsp_command<T: LspCommand>(
4331 this: ModelHandle<Self>,
4332 envelope: TypedEnvelope<T::ProtoRequest>,
4333 _: Arc<Client>,
4334 mut cx: AsyncAppContext,
4335 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4336 where
4337 <T::LspRequest as lsp::request::Request>::Result: Send,
4338 {
4339 let sender_id = envelope.original_sender_id()?;
4340 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4341 let buffer_handle = this.read_with(&cx, |this, _| {
4342 this.opened_buffers
4343 .get(&buffer_id)
4344 .and_then(|buffer| buffer.upgrade(&cx))
4345 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4346 })?;
4347 let request = T::from_proto(
4348 envelope.payload,
4349 this.clone(),
4350 buffer_handle.clone(),
4351 cx.clone(),
4352 )
4353 .await?;
4354 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4355 let response = this
4356 .update(&mut cx, |this, cx| {
4357 this.request_lsp(buffer_handle, request, cx)
4358 })
4359 .await?;
4360 this.update(&mut cx, |this, cx| {
4361 Ok(T::response_to_proto(
4362 response,
4363 this,
4364 sender_id,
4365 &buffer_version,
4366 cx,
4367 ))
4368 })
4369 }
4370
4371 async fn handle_get_project_symbols(
4372 this: ModelHandle<Self>,
4373 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4374 _: Arc<Client>,
4375 mut cx: AsyncAppContext,
4376 ) -> Result<proto::GetProjectSymbolsResponse> {
4377 let symbols = this
4378 .update(&mut cx, |this, cx| {
4379 this.symbols(&envelope.payload.query, cx)
4380 })
4381 .await?;
4382
4383 Ok(proto::GetProjectSymbolsResponse {
4384 symbols: symbols.iter().map(serialize_symbol).collect(),
4385 })
4386 }
4387
4388 async fn handle_search_project(
4389 this: ModelHandle<Self>,
4390 envelope: TypedEnvelope<proto::SearchProject>,
4391 _: Arc<Client>,
4392 mut cx: AsyncAppContext,
4393 ) -> Result<proto::SearchProjectResponse> {
4394 let peer_id = envelope.original_sender_id()?;
4395 let query = SearchQuery::from_proto(envelope.payload)?;
4396 let result = this
4397 .update(&mut cx, |this, cx| this.search(query, cx))
4398 .await?;
4399
4400 this.update(&mut cx, |this, cx| {
4401 let mut locations = Vec::new();
4402 for (buffer, ranges) in result {
4403 for range in ranges {
4404 let start = serialize_anchor(&range.start);
4405 let end = serialize_anchor(&range.end);
4406 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4407 locations.push(proto::Location {
4408 buffer: Some(buffer),
4409 start: Some(start),
4410 end: Some(end),
4411 });
4412 }
4413 }
4414 Ok(proto::SearchProjectResponse { locations })
4415 })
4416 }
4417
4418 async fn handle_open_buffer_for_symbol(
4419 this: ModelHandle<Self>,
4420 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4421 _: Arc<Client>,
4422 mut cx: AsyncAppContext,
4423 ) -> Result<proto::OpenBufferForSymbolResponse> {
4424 let peer_id = envelope.original_sender_id()?;
4425 let symbol = envelope
4426 .payload
4427 .symbol
4428 .ok_or_else(|| anyhow!("invalid symbol"))?;
4429 let symbol = this.read_with(&cx, |this, _| {
4430 let symbol = this.deserialize_symbol(symbol)?;
4431 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4432 if signature == symbol.signature {
4433 Ok(symbol)
4434 } else {
4435 Err(anyhow!("invalid symbol signature"))
4436 }
4437 })?;
4438 let buffer = this
4439 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4440 .await?;
4441
4442 Ok(proto::OpenBufferForSymbolResponse {
4443 buffer: Some(this.update(&mut cx, |this, cx| {
4444 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4445 })),
4446 })
4447 }
4448
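// Computes a SHA-256 signature over the worktree id, path, and this project's nonce. Symbols sent
// back by peers are checked against this signature before buffers are opened for them.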
4449 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4450 let mut hasher = Sha256::new();
4451 hasher.update(worktree_id.to_proto().to_be_bytes());
4452 hasher.update(path.to_string_lossy().as_bytes());
4453 hasher.update(self.nonce.to_be_bytes());
4454 hasher.finalize().as_slice().try_into().unwrap()
4455 }
4456
4457 async fn handle_open_buffer_by_id(
4458 this: ModelHandle<Self>,
4459 envelope: TypedEnvelope<proto::OpenBufferById>,
4460 _: Arc<Client>,
4461 mut cx: AsyncAppContext,
4462 ) -> Result<proto::OpenBufferResponse> {
4463 let peer_id = envelope.original_sender_id()?;
4464 let buffer = this
4465 .update(&mut cx, |this, cx| {
4466 this.open_buffer_by_id(envelope.payload.id, cx)
4467 })
4468 .await?;
4469 this.update(&mut cx, |this, cx| {
4470 Ok(proto::OpenBufferResponse {
4471 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4472 })
4473 })
4474 }
4475
4476 async fn handle_open_buffer_by_path(
4477 this: ModelHandle<Self>,
4478 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4479 _: Arc<Client>,
4480 mut cx: AsyncAppContext,
4481 ) -> Result<proto::OpenBufferResponse> {
4482 let peer_id = envelope.original_sender_id()?;
4483 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4484 let open_buffer = this.update(&mut cx, |this, cx| {
4485 this.open_buffer(
4486 ProjectPath {
4487 worktree_id,
4488 path: PathBuf::from(envelope.payload.path).into(),
4489 },
4490 cx,
4491 )
4492 });
4493
4494 let buffer = open_buffer.await?;
4495 this.update(&mut cx, |this, cx| {
4496 Ok(proto::OpenBufferResponse {
4497 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4498 })
4499 })
4500 }
4501
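// Converts a project transaction into its wire representation, serializing each affected buffer
// for the given peer along the way.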
4502 fn serialize_project_transaction_for_peer(
4503 &mut self,
4504 project_transaction: ProjectTransaction,
4505 peer_id: PeerId,
4506 cx: &AppContext,
4507 ) -> proto::ProjectTransaction {
4508 let mut serialized_transaction = proto::ProjectTransaction {
4509 buffers: Default::default(),
4510 transactions: Default::default(),
4511 };
4512 for (buffer, transaction) in project_transaction.0 {
4513 serialized_transaction
4514 .buffers
4515 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4516 serialized_transaction
4517 .transactions
4518 .push(language::proto::serialize_transaction(&transaction));
4519 }
4520 serialized_transaction
4521 }
4522
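// Reconstructs a project transaction received from a peer, waiting for the referenced edits to
// arrive in each buffer and optionally pushing the transactions onto the buffers' histories.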
4523 fn deserialize_project_transaction(
4524 &mut self,
4525 message: proto::ProjectTransaction,
4526 push_to_history: bool,
4527 cx: &mut ModelContext<Self>,
4528 ) -> Task<Result<ProjectTransaction>> {
4529 cx.spawn(|this, mut cx| async move {
4530 let mut project_transaction = ProjectTransaction::default();
4531 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4532 let buffer = this
4533 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4534 .await?;
4535 let transaction = language::proto::deserialize_transaction(transaction)?;
4536 project_transaction.0.insert(buffer, transaction);
4537 }
4538
4539 for (buffer, transaction) in &project_transaction.0 {
4540 buffer
4541 .update(&mut cx, |buffer, _| {
4542 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4543 })
4544 .await;
4545
4546 if push_to_history {
4547 buffer.update(&mut cx, |buffer, _| {
4548 buffer.push_transaction(transaction.clone(), Instant::now());
4549 });
4550 }
4551 }
4552
4553 Ok(project_transaction)
4554 })
4555 }
4556
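// The first time a buffer is sent to a given peer its full state is serialized; afterwards only
// the buffer id is sent.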
4557 fn serialize_buffer_for_peer(
4558 &mut self,
4559 buffer: &ModelHandle<Buffer>,
4560 peer_id: PeerId,
4561 cx: &AppContext,
4562 ) -> proto::Buffer {
4563 let buffer_id = buffer.read(cx).remote_id();
4564 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4565 if shared_buffers.insert(buffer_id) {
4566 proto::Buffer {
4567 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4568 }
4569 } else {
4570 proto::Buffer {
4571 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4572 }
4573 }
4574 }
4575
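// Resolves a buffer received over the wire. An id-only message waits until the corresponding
// buffer has been registered locally, while a full state message constructs a new buffer (and its
// file, if any) and registers it.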
4576 fn deserialize_buffer(
4577 &mut self,
4578 buffer: proto::Buffer,
4579 cx: &mut ModelContext<Self>,
4580 ) -> Task<Result<ModelHandle<Buffer>>> {
4581 let replica_id = self.replica_id();
4582
4583 let opened_buffer_tx = self.opened_buffer.0.clone();
4584 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4585 cx.spawn(|this, mut cx| async move {
4586 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4587 proto::buffer::Variant::Id(id) => {
4588 let buffer = loop {
4589 let buffer = this.read_with(&cx, |this, cx| {
4590 this.opened_buffers
4591 .get(&id)
4592 .and_then(|buffer| buffer.upgrade(cx))
4593 });
4594 if let Some(buffer) = buffer {
4595 break buffer;
4596 }
4597 opened_buffer_rx
4598 .next()
4599 .await
4600 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4601 };
4602 Ok(buffer)
4603 }
4604 proto::buffer::Variant::State(mut buffer) => {
4605 let mut buffer_worktree = None;
4606 let mut buffer_file = None;
4607 if let Some(file) = buffer.file.take() {
4608 this.read_with(&cx, |this, cx| {
4609 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4610 let worktree =
4611 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4612 anyhow!("no worktree found for id {}", file.worktree_id)
4613 })?;
4614 buffer_file =
4615 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4616 as Box<dyn language::File>);
4617 buffer_worktree = Some(worktree);
4618 Ok::<_, anyhow::Error>(())
4619 })?;
4620 }
4621
4622 let buffer = cx.add_model(|cx| {
4623 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4624 });
4625
4626 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4627
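// Signal tasks waiting in `deserialize_buffer` that a new buffer has been registered.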
4628 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4629 Ok(buffer)
4630 }
4631 }
4632 })
4633 }
4634
4635 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4636 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4637 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4638 let start = serialized_symbol
4639 .start
4640 .ok_or_else(|| anyhow!("invalid start"))?;
4641 let end = serialized_symbol
4642 .end
4643 .ok_or_else(|| anyhow!("invalid end"))?;
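// Safety: relies on the peer having sent a valid symbol-kind discriminant (see the matching
// transmute in `serialize_symbol`).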
4644 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4645 let path = PathBuf::from(serialized_symbol.path);
4646 let language = self.languages.select_language(&path);
4647 Ok(Symbol {
4648 source_worktree_id,
4649 worktree_id,
4650 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4651 label: language
4652 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4653 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4654 name: serialized_symbol.name,
4655 path,
4656 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4657 kind,
4658 signature: serialized_symbol
4659 .signature
4660 .try_into()
4661 .map_err(|_| anyhow!("invalid signature"))?,
4662 })
4663 }
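// Applies a save reported by a peer: updates the local buffer's saved version and mtime.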
4664
4665 async fn handle_buffer_saved(
4666 this: ModelHandle<Self>,
4667 envelope: TypedEnvelope<proto::BufferSaved>,
4668 _: Arc<Client>,
4669 mut cx: AsyncAppContext,
4670 ) -> Result<()> {
4671 let version = deserialize_version(envelope.payload.version);
4672 let mtime = envelope
4673 .payload
4674 .mtime
4675 .ok_or_else(|| anyhow!("missing mtime"))?
4676 .into();
4677
4678 this.update(&mut cx, |this, cx| {
4679 let buffer = this
4680 .opened_buffers
4681 .get(&envelope.payload.buffer_id)
4682 .and_then(|buffer| buffer.upgrade(cx));
4683 if let Some(buffer) = buffer {
4684 buffer.update(cx, |buffer, cx| {
4685 buffer.did_save(version, mtime, None, cx);
4686 });
4687 }
4688 Ok(())
4689 })
4690 }
4691
4692 async fn handle_buffer_reloaded(
4693 this: ModelHandle<Self>,
4694 envelope: TypedEnvelope<proto::BufferReloaded>,
4695 _: Arc<Client>,
4696 mut cx: AsyncAppContext,
4697 ) -> Result<()> {
4698 let payload = envelope.payload.clone();
4699 let version = deserialize_version(payload.version);
4700 let mtime = payload
4701 .mtime
4702 .ok_or_else(|| anyhow!("missing mtime"))?
4703 .into();
4704 this.update(&mut cx, |this, cx| {
4705 let buffer = this
4706 .opened_buffers
4707 .get(&payload.buffer_id)
4708 .and_then(|buffer| buffer.upgrade(cx));
4709 if let Some(buffer) = buffer {
4710 buffer.update(cx, |buffer, cx| {
4711 buffer.did_reload(version, mtime, cx);
4712 });
4713 }
4714 Ok(())
4715 })
4716 }
4717
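// Fuzzy-matches a query against the paths of all visible worktrees, running the actual matching
// on the background executor.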
4718 pub fn match_paths<'a>(
4719 &self,
4720 query: &'a str,
4721 include_ignored: bool,
4722 smart_case: bool,
4723 max_results: usize,
4724 cancel_flag: &'a AtomicBool,
4725 cx: &AppContext,
4726 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4727 let worktrees = self
4728 .worktrees(cx)
4729 .filter(|worktree| worktree.read(cx).is_visible())
4730 .collect::<Vec<_>>();
4731 let include_root_name = worktrees.len() > 1;
4732 let candidate_sets = worktrees
4733 .into_iter()
4734 .map(|worktree| CandidateSet {
4735 snapshot: worktree.read(cx).snapshot(),
4736 include_ignored,
4737 include_root_name,
4738 })
4739 .collect::<Vec<_>>();
4740
4741 let background = cx.background().clone();
4742 async move {
4743 fuzzy::match_paths(
4744 candidate_sets.as_slice(),
4745 query,
4746 smart_case,
4747 max_results,
4748 cancel_flag,
4749 background,
4750 )
4751 .await
4752 }
4753 }
4754
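// Converts edits received from a language server into anchor ranges in the buffer, resolving them
// against the snapshot corresponding to the document version the server was looking at.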
4755 fn edits_from_lsp(
4756 &mut self,
4757 buffer: &ModelHandle<Buffer>,
4758 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4759 version: Option<i32>,
4760 cx: &mut ModelContext<Self>,
4761 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4762 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4763 cx.background().spawn(async move {
4764 let snapshot = snapshot?;
4765 let mut lsp_edits = lsp_edits
4766 .into_iter()
4767 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4768 .peekable();
4769
4770 let mut edits = Vec::new();
4771 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4772 // Combine any LSP edits that are adjacent.
4773 //
4774 // Also, combine LSP edits that are separated from each other by only
4775 // a newline. This is important because for some code actions,
4776 // Rust-analyzer rewrites the entire buffer via a series of edits that
4777 // are separated by unchanged newline characters.
4778 //
4779 // In order for the diffing logic below to work properly, any edits that
4780 // cancel each other out must be combined into one.
4781 while let Some((next_range, next_text)) = lsp_edits.peek() {
4782 if next_range.start > range.end {
4783 if next_range.start.row > range.end.row + 1
4784 || next_range.start.column > 0
4785 || snapshot.clip_point_utf16(
4786 PointUtf16::new(range.end.row, u32::MAX),
4787 Bias::Left,
4788 ) > range.end
4789 {
4790 break;
4791 }
4792 new_text.push('\n');
4793 }
4794 range.end = next_range.end;
4795 new_text.push_str(&next_text);
4796 lsp_edits.next();
4797 }
4798
4799 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4800 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4801 {
4802 return Err(anyhow!("invalid edits received from language server"));
4803 }
4804
4805 // For multiline edits, perform a diff of the old and new text so that
4806 // we can identify the changes more precisely, preserving the locations
4807 // of any anchors positioned in the unchanged regions.
4808 if range.end.row > range.start.row {
4809 let mut offset = range.start.to_offset(&snapshot);
4810 let old_text = snapshot.text_for_range(range).collect::<String>();
4811
4812 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4813 let mut moved_since_edit = true;
4814 for change in diff.iter_all_changes() {
4815 let tag = change.tag();
4816 let value = change.value();
4817 match tag {
4818 ChangeTag::Equal => {
4819 offset += value.len();
4820 moved_since_edit = true;
4821 }
4822 ChangeTag::Delete => {
4823 let start = snapshot.anchor_after(offset);
4824 let end = snapshot.anchor_before(offset + value.len());
4825 if moved_since_edit {
4826 edits.push((start..end, String::new()));
4827 } else {
4828 edits.last_mut().unwrap().0.end = end;
4829 }
4830 offset += value.len();
4831 moved_since_edit = false;
4832 }
4833 ChangeTag::Insert => {
4834 if moved_since_edit {
4835 let anchor = snapshot.anchor_after(offset);
4836 edits.push((anchor.clone()..anchor, value.to_string()));
4837 } else {
4838 edits.last_mut().unwrap().1.push_str(value);
4839 }
4840 moved_since_edit = false;
4841 }
4842 }
4843 }
4844 } else if range.end == range.start {
4845 let anchor = snapshot.anchor_after(range.start);
4846 edits.push((anchor.clone()..anchor, new_text));
4847 } else {
4848 let edit_start = snapshot.anchor_after(range.start);
4849 let edit_end = snapshot.anchor_before(range.end);
4850 edits.push((edit_start..edit_end, new_text));
4851 }
4852 }
4853
4854 Ok(edits)
4855 })
4856 }
4857
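// Returns the buffer snapshot matching the given LSP document version (or the current text if no
// version was provided), pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old.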
4858 fn buffer_snapshot_for_lsp_version(
4859 &mut self,
4860 buffer: &ModelHandle<Buffer>,
4861 version: Option<i32>,
4862 cx: &AppContext,
4863 ) -> Result<TextBufferSnapshot> {
4864 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4865
4866 if let Some(version) = version {
4867 let buffer_id = buffer.read(cx).remote_id();
4868 let snapshots = self
4869 .buffer_snapshots
4870 .get_mut(&buffer_id)
4871 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4872 let mut found_snapshot = None;
4873 snapshots.retain(|(snapshot_version, snapshot)| {
4874 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4875 false
4876 } else {
4877 if *snapshot_version == version {
4878 found_snapshot = Some(snapshot.clone());
4879 }
4880 true
4881 }
4882 });
4883
4884 found_snapshot.ok_or_else(|| {
4885 anyhow!(
4886 "snapshot not found for buffer {} at version {}",
4887 buffer_id,
4888 version
4889 )
4890 })
4891 } else {
4892 Ok(buffer.read(cx).text_snapshot())
4893 }
4894 }
4895
4896 fn language_server_for_buffer(
4897 &self,
4898 buffer: &Buffer,
4899 cx: &AppContext,
4900 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4901 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4902 let worktree_id = file.worktree_id(cx);
4903 self.language_servers
4904 .get(&(worktree_id, language.lsp_adapter()?.name()))
4905 } else {
4906 None
4907 }
4908 }
4909}
4910
4911impl WorktreeHandle {
4912 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4913 match self {
4914 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4915 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4916 }
4917 }
4918}
4919
4920impl OpenBuffer {
4921 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4922 match self {
4923 OpenBuffer::Strong(handle) => Some(handle.clone()),
4924 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4925 OpenBuffer::Loading(_) => None,
4926 }
4927 }
4928}
4929
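// Adapts a worktree snapshot for use with the fuzzy path matcher.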
4930struct CandidateSet {
4931 snapshot: Snapshot,
4932 include_ignored: bool,
4933 include_root_name: bool,
4934}
4935
4936impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4937 type Candidates = CandidateSetIter<'a>;
4938
4939 fn id(&self) -> usize {
4940 self.snapshot.id().to_usize()
4941 }
4942
4943 fn len(&self) -> usize {
4944 if self.include_ignored {
4945 self.snapshot.file_count()
4946 } else {
4947 self.snapshot.visible_file_count()
4948 }
4949 }
4950
4951 fn prefix(&self) -> Arc<str> {
4952 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4953 self.snapshot.root_name().into()
4954 } else if self.include_root_name {
4955 format!("{}/", self.snapshot.root_name()).into()
4956 } else {
4957 "".into()
4958 }
4959 }
4960
4961 fn candidates(&'a self, start: usize) -> Self::Candidates {
4962 CandidateSetIter {
4963 traversal: self.snapshot.files(self.include_ignored, start),
4964 }
4965 }
4966}
4967
4968struct CandidateSetIter<'a> {
4969 traversal: Traversal<'a>,
4970}
4971
4972impl<'a> Iterator for CandidateSetIter<'a> {
4973 type Item = PathMatchCandidate<'a>;
4974
4975 fn next(&mut self) -> Option<Self::Item> {
4976 self.traversal.next().map(|entry| {
4977 if let EntryKind::File(char_bag) = entry.kind {
4978 PathMatchCandidate {
4979 path: &entry.path,
4980 char_bag,
4981 }
4982 } else {
4983 unreachable!()
4984 }
4985 })
4986 }
4987}
4988
4989impl Entity for Project {
4990 type Event = Event;
4991
4992 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4993 match &self.client_state {
4994 ProjectClientState::Local { remote_id_rx, .. } => {
4995 if let Some(project_id) = *remote_id_rx.borrow() {
4996 self.client
4997 .send(proto::UnregisterProject { project_id })
4998 .log_err();
4999 }
5000 }
5001 ProjectClientState::Remote { remote_id, .. } => {
5002 self.client
5003 .send(proto::LeaveProject {
5004 project_id: *remote_id,
5005 })
5006 .log_err();
5007 }
5008 }
5009 }
5010
5011 fn app_will_quit(
5012 &mut self,
5013 _: &mut MutableAppContext,
5014 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5015 let shutdown_futures = self
5016 .language_servers
5017 .drain()
5018 .filter_map(|(_, (_, server))| server.shutdown())
5019 .collect::<Vec<_>>();
5020 Some(
5021 async move {
5022 futures::future::join_all(shutdown_futures).await;
5023 }
5024 .boxed(),
5025 )
5026 }
5027}
5028
5029impl Collaborator {
5030 fn from_proto(
5031 message: proto::Collaborator,
5032 user_store: &ModelHandle<UserStore>,
5033 cx: &mut AsyncAppContext,
5034 ) -> impl Future<Output = Result<Self>> {
5035 let user = user_store.update(cx, |user_store, cx| {
5036 user_store.fetch_user(message.user_id, cx)
5037 });
5038
5039 async move {
5040 Ok(Self {
5041 peer_id: PeerId(message.peer_id),
5042 user: user.await?,
5043 replica_id: message.replica_id as ReplicaId,
5044 })
5045 }
5046 }
5047}
5048
5049impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5050 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5051 Self {
5052 worktree_id,
5053 path: path.as_ref().into(),
5054 }
5055 }
5056}
5057
5058impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5059 fn from(options: lsp::CreateFileOptions) -> Self {
5060 Self {
5061 overwrite: options.overwrite.unwrap_or(false),
5062 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5063 }
5064 }
5065}
5066
5067impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5068 fn from(options: lsp::RenameFileOptions) -> Self {
5069 Self {
5070 overwrite: options.overwrite.unwrap_or(false),
5071 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5072 }
5073 }
5074}
5075
5076impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5077 fn from(options: lsp::DeleteFileOptions) -> Self {
5078 Self {
5079 recursive: options.recursive.unwrap_or(false),
5080 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5081 }
5082 }
5083}
5084
5085fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5086 proto::Symbol {
5087 source_worktree_id: symbol.source_worktree_id.to_proto(),
5088 worktree_id: symbol.worktree_id.to_proto(),
5089 language_server_name: symbol.language_server_name.0.to_string(),
5090 name: symbol.name.clone(),
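// Mirrors the transmute in `deserialize_symbol`; assumes the kind's discriminant fits the proto field.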
5091 kind: unsafe { mem::transmute(symbol.kind) },
5092 path: symbol.path.to_string_lossy().to_string(),
5093 start: Some(proto::Point {
5094 row: symbol.range.start.row,
5095 column: symbol.range.start.column,
5096 }),
5097 end: Some(proto::Point {
5098 row: symbol.range.end.row,
5099 column: symbol.range.end.column,
5100 }),
5101 signature: symbol.signature.to_vec(),
5102 }
5103}
5104
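// Computes `path` relative to `base`, emitting `..` components when `path` is not inside `base`.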
5105fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5106 let mut path_components = path.components();
5107 let mut base_components = base.components();
5108 let mut components: Vec<Component> = Vec::new();
5109 loop {
5110 match (path_components.next(), base_components.next()) {
5111 (None, None) => break,
5112 (Some(a), None) => {
5113 components.push(a);
5114 components.extend(path_components.by_ref());
5115 break;
5116 }
5117 (None, _) => components.push(Component::ParentDir),
5118 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5119 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5120 (Some(a), Some(_)) => {
5121 components.push(Component::ParentDir);
5122 for _ in base_components {
5123 components.push(Component::ParentDir);
5124 }
5125 components.push(a);
5126 components.extend(path_components.by_ref());
5127 break;
5128 }
5129 }
5130 }
5131 components.iter().map(|c| c.as_os_str()).collect()
5132}
5133
5134impl Item for Buffer {
5135 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5136 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5137 }
5138}
5139
5140#[cfg(test)]
5141mod tests {
5142 use crate::worktree::WorktreeHandle;
5143
5144 use super::{Event, *};
5145 use fs::RealFs;
5146 use futures::{future, StreamExt};
5147 use gpui::test::subscribe;
5148 use language::{
5149 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5150 OffsetRangeExt, Point, ToPoint,
5151 };
5152 use lsp::Url;
5153 use serde_json::json;
5154 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5155 use unindent::Unindent as _;
5156 use util::{assert_set_eq, test::temp_tree};
5157
5158 #[gpui::test]
5159 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5160 let dir = temp_tree(json!({
5161 "root": {
5162 "apple": "",
5163 "banana": {
5164 "carrot": {
5165 "date": "",
5166 "endive": "",
5167 }
5168 },
5169 "fennel": {
5170 "grape": "",
5171 }
5172 }
5173 }));
5174
5175 let root_link_path = dir.path().join("root_link");
5176 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5177 unix::fs::symlink(
5178 &dir.path().join("root/fennel"),
5179 &dir.path().join("root/finnochio"),
5180 )
5181 .unwrap();
5182
5183 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
5184
5185 project.read_with(cx, |project, cx| {
5186 let tree = project.worktrees(cx).next().unwrap().read(cx);
5187 assert_eq!(tree.file_count(), 5);
5188 assert_eq!(
5189 tree.inode_for_path("fennel/grape"),
5190 tree.inode_for_path("finnochio/grape")
5191 );
5192 });
5193
5194 let cancel_flag = Default::default();
5195 let results = project
5196 .read_with(cx, |project, cx| {
5197 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5198 })
5199 .await;
5200 assert_eq!(
5201 results
5202 .into_iter()
5203 .map(|result| result.path)
5204 .collect::<Vec<Arc<Path>>>(),
5205 vec![
5206 PathBuf::from("banana/carrot/date").into(),
5207 PathBuf::from("banana/carrot/endive").into(),
5208 ]
5209 );
5210 }
5211
5212 #[gpui::test]
5213 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5214 cx.foreground().forbid_parking();
5215
5216 let mut rust_language = Language::new(
5217 LanguageConfig {
5218 name: "Rust".into(),
5219 path_suffixes: vec!["rs".to_string()],
5220 ..Default::default()
5221 },
5222 Some(tree_sitter_rust::language()),
5223 );
5224 let mut json_language = Language::new(
5225 LanguageConfig {
5226 name: "JSON".into(),
5227 path_suffixes: vec!["json".to_string()],
5228 ..Default::default()
5229 },
5230 None,
5231 );
5232 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5233 name: "the-rust-language-server",
5234 capabilities: lsp::ServerCapabilities {
5235 completion_provider: Some(lsp::CompletionOptions {
5236 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5237 ..Default::default()
5238 }),
5239 ..Default::default()
5240 },
5241 ..Default::default()
5242 });
5243 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5244 name: "the-json-language-server",
5245 capabilities: lsp::ServerCapabilities {
5246 completion_provider: Some(lsp::CompletionOptions {
5247 trigger_characters: Some(vec![":".to_string()]),
5248 ..Default::default()
5249 }),
5250 ..Default::default()
5251 },
5252 ..Default::default()
5253 });
5254
5255 let fs = FakeFs::new(cx.background());
5256 fs.insert_tree(
5257 "/the-root",
5258 json!({
5259 "test.rs": "const A: i32 = 1;",
5260 "test2.rs": "",
5261 "Cargo.toml": "a = 1",
5262 "package.json": "{\"a\": 1}",
5263 }),
5264 )
5265 .await;
5266
5267 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5268 project.update(cx, |project, _| {
5269 project.languages.add(Arc::new(rust_language));
5270 project.languages.add(Arc::new(json_language));
5271 });
5272
5273 // Open a buffer without an associated language server.
5274 let toml_buffer = project
5275 .update(cx, |project, cx| {
5276 project.open_local_buffer("/the-root/Cargo.toml", cx)
5277 })
5278 .await
5279 .unwrap();
5280
5281 // Open a buffer with an associated language server.
5282 let rust_buffer = project
5283 .update(cx, |project, cx| {
5284 project.open_local_buffer("/the-root/test.rs", cx)
5285 })
5286 .await
5287 .unwrap();
5288
5289 // A server is started up, and it is notified about Rust files.
5290 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5291 assert_eq!(
5292 fake_rust_server
5293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5294 .await
5295 .text_document,
5296 lsp::TextDocumentItem {
5297 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5298 version: 0,
5299 text: "const A: i32 = 1;".to_string(),
5300 language_id: Default::default()
5301 }
5302 );
5303
5304 // The buffer is configured based on the language server's capabilities.
5305 rust_buffer.read_with(cx, |buffer, _| {
5306 assert_eq!(
5307 buffer.completion_triggers(),
5308 &[".".to_string(), "::".to_string()]
5309 );
5310 });
5311 toml_buffer.read_with(cx, |buffer, _| {
5312 assert!(buffer.completion_triggers().is_empty());
5313 });
5314
5315 // Edit a buffer. The changes are reported to the language server.
5316 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5317 assert_eq!(
5318 fake_rust_server
5319 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5320 .await
5321 .text_document,
5322 lsp::VersionedTextDocumentIdentifier::new(
5323 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5324 1
5325 )
5326 );
5327
5328 // Open a third buffer with a different associated language server.
5329 let json_buffer = project
5330 .update(cx, |project, cx| {
5331 project.open_local_buffer("/the-root/package.json", cx)
5332 })
5333 .await
5334 .unwrap();
5335
5336 // A JSON language server is started and is notified only about the JSON buffer.
5337 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5338 assert_eq!(
5339 fake_json_server
5340 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5341 .await
5342 .text_document,
5343 lsp::TextDocumentItem {
5344 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5345 version: 0,
5346 text: "{\"a\": 1}".to_string(),
5347 language_id: Default::default()
5348 }
5349 );
5350
5351 // This buffer is configured based on the second language server's
5352 // capabilities.
5353 json_buffer.read_with(cx, |buffer, _| {
5354 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5355 });
5356
5357 // When opening another buffer whose language server is already running,
5358 // it is also configured based on the existing language server's capabilities.
5359 let rust_buffer2 = project
5360 .update(cx, |project, cx| {
5361 project.open_local_buffer("/the-root/test2.rs", cx)
5362 })
5363 .await
5364 .unwrap();
5365 rust_buffer2.read_with(cx, |buffer, _| {
5366 assert_eq!(
5367 buffer.completion_triggers(),
5368 &[".".to_string(), "::".to_string()]
5369 );
5370 });
5371
5372 // Changes are reported only to servers matching the buffer's language.
5373 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5374 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5375 assert_eq!(
5376 fake_rust_server
5377 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5378 .await
5379 .text_document,
5380 lsp::VersionedTextDocumentIdentifier::new(
5381 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5382 1
5383 )
5384 );
5385
5386 // Save notifications are reported to all servers.
5387 toml_buffer
5388 .update(cx, |buffer, cx| buffer.save(cx))
5389 .await
5390 .unwrap();
5391 assert_eq!(
5392 fake_rust_server
5393 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5394 .await
5395 .text_document,
5396 lsp::TextDocumentIdentifier::new(
5397 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5398 )
5399 );
5400 assert_eq!(
5401 fake_json_server
5402 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5403 .await
5404 .text_document,
5405 lsp::TextDocumentIdentifier::new(
5406 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5407 )
5408 );
5409
5410 // Renames are reported only to servers matching the buffer's language.
5411 fs.rename(
5412 Path::new("/the-root/test2.rs"),
5413 Path::new("/the-root/test3.rs"),
5414 Default::default(),
5415 )
5416 .await
5417 .unwrap();
5418 assert_eq!(
5419 fake_rust_server
5420 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5421 .await
5422 .text_document,
5423 lsp::TextDocumentIdentifier::new(
5424 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5425 ),
5426 );
5427 assert_eq!(
5428 fake_rust_server
5429 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5430 .await
5431 .text_document,
5432 lsp::TextDocumentItem {
5433 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5434 version: 0,
5435 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5436 language_id: Default::default()
5437 },
5438 );
5439
5440 rust_buffer2.update(cx, |buffer, cx| {
5441 buffer.update_diagnostics(
5442 DiagnosticSet::from_sorted_entries(
5443 vec![DiagnosticEntry {
5444 diagnostic: Default::default(),
5445 range: Anchor::MIN..Anchor::MAX,
5446 }],
5447 &buffer.snapshot(),
5448 ),
5449 cx,
5450 );
5451 assert_eq!(
5452 buffer
5453 .snapshot()
5454 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5455 .count(),
5456 1
5457 );
5458 });
5459
5460 // When the rename changes the extension of the file, the buffer gets closed on the old
5461 // language server and gets opened on the new one.
5462 fs.rename(
5463 Path::new("/the-root/test3.rs"),
5464 Path::new("/the-root/test3.json"),
5465 Default::default(),
5466 )
5467 .await
5468 .unwrap();
5469 assert_eq!(
5470 fake_rust_server
5471 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5472 .await
5473 .text_document,
5474 lsp::TextDocumentIdentifier::new(
5475 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5476 ),
5477 );
5478 assert_eq!(
5479 fake_json_server
5480 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5481 .await
5482 .text_document,
5483 lsp::TextDocumentItem {
5484 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5485 version: 0,
5486 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5487 language_id: Default::default()
5488 },
5489 );
5490
5491 // We clear the diagnostics, since the language has changed.
5492 rust_buffer2.read_with(cx, |buffer, _| {
5493 assert_eq!(
5494 buffer
5495 .snapshot()
5496 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5497 .count(),
5498 0
5499 );
5500 });
5501
5502 // The renamed file's version resets after changing language server.
5503 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5504 assert_eq!(
5505 fake_json_server
5506 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5507 .await
5508 .text_document,
5509 lsp::VersionedTextDocumentIdentifier::new(
5510 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5511 1
5512 )
5513 );
5514
5515 // Restart language servers
5516 project.update(cx, |project, cx| {
5517 project.restart_language_servers_for_buffers(
5518 vec![rust_buffer.clone(), json_buffer.clone()],
5519 cx,
5520 );
5521 });
5522
5523 let mut rust_shutdown_requests = fake_rust_server
5524 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5525 let mut json_shutdown_requests = fake_json_server
5526 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5527 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5528
5529 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5530 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5531
5532 // Ensure the Rust document is reopened in the new Rust language server
5533 assert_eq!(
5534 fake_rust_server
5535 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5536 .await
5537 .text_document,
5538 lsp::TextDocumentItem {
5539 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5540 version: 1,
5541 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5542 language_id: Default::default()
5543 }
5544 );
5545
5546 // Ensure the JSON documents are reopened in the new JSON language server
5547 assert_set_eq!(
5548 [
5549 fake_json_server
5550 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5551 .await
5552 .text_document,
5553 fake_json_server
5554 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5555 .await
5556 .text_document,
5557 ],
5558 [
5559 lsp::TextDocumentItem {
5560 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5561 version: 0,
5562 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5563 language_id: Default::default()
5564 },
5565 lsp::TextDocumentItem {
5566 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5567 version: 1,
5568 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5569 language_id: Default::default()
5570 }
5571 ]
5572 );
5573
5574 // Close notifications are reported only to servers matching the buffer's language.
5575 cx.update(|_| drop(json_buffer));
5576 let close_message = lsp::DidCloseTextDocumentParams {
5577 text_document: lsp::TextDocumentIdentifier::new(
5578 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5579 ),
5580 };
5581 assert_eq!(
5582 fake_json_server
5583 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5584 .await,
5585 close_message,
5586 );
5587 }
5588
5589 #[gpui::test]
5590 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5591 cx.foreground().forbid_parking();
5592
5593 let fs = FakeFs::new(cx.background());
5594 fs.insert_tree(
5595 "/dir",
5596 json!({
5597 "a.rs": "let a = 1;",
5598 "b.rs": "let b = 2;"
5599 }),
5600 )
5601 .await;
5602
5603 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5604
5605 let buffer_a = project
5606 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5607 .await
5608 .unwrap();
5609 let buffer_b = project
5610 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5611 .await
5612 .unwrap();
5613
5614 project.update(cx, |project, cx| {
5615 project
5616 .update_diagnostics(
5617 lsp::PublishDiagnosticsParams {
5618 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5619 version: None,
5620 diagnostics: vec![lsp::Diagnostic {
5621 range: lsp::Range::new(
5622 lsp::Position::new(0, 4),
5623 lsp::Position::new(0, 5),
5624 ),
5625 severity: Some(lsp::DiagnosticSeverity::ERROR),
5626 message: "error 1".to_string(),
5627 ..Default::default()
5628 }],
5629 },
5630 &[],
5631 cx,
5632 )
5633 .unwrap();
5634 project
5635 .update_diagnostics(
5636 lsp::PublishDiagnosticsParams {
5637 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5638 version: None,
5639 diagnostics: vec![lsp::Diagnostic {
5640 range: lsp::Range::new(
5641 lsp::Position::new(0, 4),
5642 lsp::Position::new(0, 5),
5643 ),
5644 severity: Some(lsp::DiagnosticSeverity::WARNING),
5645 message: "error 2".to_string(),
5646 ..Default::default()
5647 }],
5648 },
5649 &[],
5650 cx,
5651 )
5652 .unwrap();
5653 });
5654
5655 buffer_a.read_with(cx, |buffer, _| {
5656 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5657 assert_eq!(
5658 chunks
5659 .iter()
5660 .map(|(s, d)| (s.as_str(), *d))
5661 .collect::<Vec<_>>(),
5662 &[
5663 ("let ", None),
5664 ("a", Some(DiagnosticSeverity::ERROR)),
5665 (" = 1;", None),
5666 ]
5667 );
5668 });
5669 buffer_b.read_with(cx, |buffer, _| {
5670 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5671 assert_eq!(
5672 chunks
5673 .iter()
5674 .map(|(s, d)| (s.as_str(), *d))
5675 .collect::<Vec<_>>(),
5676 &[
5677 ("let ", None),
5678 ("b", Some(DiagnosticSeverity::WARNING)),
5679 (" = 2;", None),
5680 ]
5681 );
5682 });
5683 }
5684
5685 #[gpui::test]
5686 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5687 cx.foreground().forbid_parking();
5688
5689 let progress_token = "the-progress-token";
5690 let mut language = Language::new(
5691 LanguageConfig {
5692 name: "Rust".into(),
5693 path_suffixes: vec!["rs".to_string()],
5694 ..Default::default()
5695 },
5696 Some(tree_sitter_rust::language()),
5697 );
5698 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5699 disk_based_diagnostics_progress_token: Some(progress_token),
5700 disk_based_diagnostics_sources: &["disk"],
5701 ..Default::default()
5702 });
5703
5704 let fs = FakeFs::new(cx.background());
5705 fs.insert_tree(
5706 "/dir",
5707 json!({
5708 "a.rs": "fn a() { A }",
5709 "b.rs": "const y: i32 = 1",
5710 }),
5711 )
5712 .await;
5713
5714 let project = Project::test(fs, ["/dir"], cx).await;
5715 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5716 let worktree_id =
5717 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5718
5719         // Cause the worktree to start the fake language server
5720 let _buffer = project
5721 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5722 .await
5723 .unwrap();
5724
5725 let mut events = subscribe(&project, cx);
5726
5727 let mut fake_server = fake_servers.next().await.unwrap();
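        // The first progress notification with the disk-based token emits a started event.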
5728 fake_server.start_progress(progress_token).await;
5729 assert_eq!(
5730 events.next().await.unwrap(),
5731 Event::DiskBasedDiagnosticsStarted
5732 );
5733
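        // Overlapping progress notifications for the same token are tracked together;
        // the work isn't considered finished until all of them have ended.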
5734 fake_server.start_progress(progress_token).await;
5735 fake_server.end_progress(progress_token).await;
5736 fake_server.start_progress(progress_token).await;
5737
5738 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5739 lsp::PublishDiagnosticsParams {
5740 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5741 version: None,
5742 diagnostics: vec![lsp::Diagnostic {
5743 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5744 severity: Some(lsp::DiagnosticSeverity::ERROR),
5745 message: "undefined variable 'A'".to_string(),
5746 ..Default::default()
5747 }],
5748 },
5749 );
5750 assert_eq!(
5751 events.next().await.unwrap(),
5752 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5753 );
5754
5755 fake_server.end_progress(progress_token).await;
5756 fake_server.end_progress(progress_token).await;
5757 assert_eq!(
5758 events.next().await.unwrap(),
5759 Event::DiskBasedDiagnosticsUpdated
5760 );
5761 assert_eq!(
5762 events.next().await.unwrap(),
5763 Event::DiskBasedDiagnosticsFinished
5764 );
5765
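        // Open the file that received diagnostics and verify they were stored on its buffer.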
5766 let buffer = project
5767 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5768 .await
5769 .unwrap();
5770
5771 buffer.read_with(cx, |buffer, _| {
5772 let snapshot = buffer.snapshot();
5773 let diagnostics = snapshot
5774 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5775 .collect::<Vec<_>>();
5776 assert_eq!(
5777 diagnostics,
5778 &[DiagnosticEntry {
5779 range: Point::new(0, 9)..Point::new(0, 10),
5780 diagnostic: Diagnostic {
5781 severity: lsp::DiagnosticSeverity::ERROR,
5782 message: "undefined variable 'A'".to_string(),
5783 group_id: 0,
5784 is_primary: true,
5785 ..Default::default()
5786 }
5787 }]
5788 )
5789 });
5790
5791 // Ensure publishing empty diagnostics twice only results in one update event.
5792 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5793 lsp::PublishDiagnosticsParams {
5794 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5795 version: None,
5796 diagnostics: Default::default(),
5797 },
5798 );
5799 assert_eq!(
5800 events.next().await.unwrap(),
5801 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5802 );
5803
5804 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5805 lsp::PublishDiagnosticsParams {
5806 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5807 version: None,
5808 diagnostics: Default::default(),
5809 },
5810 );
5811 cx.foreground().run_until_parked();
5812 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5813 }
5814
5815 #[gpui::test]
5816 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5817 cx.foreground().forbid_parking();
5818
5819 let progress_token = "the-progress-token";
5820 let mut language = Language::new(
5821 LanguageConfig {
5822 path_suffixes: vec!["rs".to_string()],
5823 ..Default::default()
5824 },
5825 None,
5826 );
5827 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5828 disk_based_diagnostics_sources: &["disk"],
5829 disk_based_diagnostics_progress_token: Some(progress_token),
5830 ..Default::default()
5831 });
5832
5833 let fs = FakeFs::new(cx.background());
5834 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5835
5836 let project = Project::test(fs, ["/dir"], cx).await;
5837 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5838
5839 let buffer = project
5840 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5841 .await
5842 .unwrap();
5843
5844 // Simulate diagnostics starting to update.
5845 let mut fake_server = fake_servers.next().await.unwrap();
5846 fake_server.start_progress(progress_token).await;
5847
5848 // Restart the server before the diagnostics finish updating.
5849 project.update(cx, |project, cx| {
5850 project.restart_language_servers_for_buffers([buffer], cx);
5851 });
5852 let mut events = subscribe(&project, cx);
5853
5854 // Simulate the newly started server sending more diagnostics.
5855 let mut fake_server = fake_servers.next().await.unwrap();
5856 fake_server.start_progress(progress_token).await;
5857 assert_eq!(
5858 events.next().await.unwrap(),
5859 Event::DiskBasedDiagnosticsStarted
5860 );
5861
5862 // All diagnostics are considered done, despite the old server's diagnostic
5863 // task never completing.
5864 fake_server.end_progress(progress_token).await;
5865 assert_eq!(
5866 events.next().await.unwrap(),
5867 Event::DiskBasedDiagnosticsUpdated
5868 );
5869 assert_eq!(
5870 events.next().await.unwrap(),
5871 Event::DiskBasedDiagnosticsFinished
5872 );
5873 project.read_with(cx, |project, _| {
5874 assert!(!project.is_running_disk_based_diagnostics());
5875 });
5876 }
5877
5878 #[gpui::test]
5879 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5880 cx.foreground().forbid_parking();
5881
5882 let mut language = Language::new(
5883 LanguageConfig {
5884 name: "Rust".into(),
5885 path_suffixes: vec!["rs".to_string()],
5886 ..Default::default()
5887 },
5888 Some(tree_sitter_rust::language()),
5889 );
5890 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5891 disk_based_diagnostics_sources: &["disk"],
5892 ..Default::default()
5893 });
5894
5895 let text = "
5896 fn a() { A }
5897 fn b() { BB }
5898 fn c() { CCC }
5899 "
5900 .unindent();
5901
5902 let fs = FakeFs::new(cx.background());
5903 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5904
5905 let project = Project::test(fs, ["/dir"], cx).await;
5906 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5907
5908 let buffer = project
5909 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5910 .await
5911 .unwrap();
5912
5913 let mut fake_server = fake_servers.next().await.unwrap();
5914 let open_notification = fake_server
5915 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5916 .await;
5917
5918 // Edit the buffer, moving the content down
5919 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
5920 let change_notification_1 = fake_server
5921 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5922 .await;
5923 assert!(
5924 change_notification_1.text_document.version > open_notification.text_document.version
5925 );
5926
5927 // Report some diagnostics for the initial version of the buffer
5928 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5929 lsp::PublishDiagnosticsParams {
5930 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5931 version: Some(open_notification.text_document.version),
5932 diagnostics: vec![
5933 lsp::Diagnostic {
5934 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5935 severity: Some(DiagnosticSeverity::ERROR),
5936 message: "undefined variable 'A'".to_string(),
5937 source: Some("disk".to_string()),
5938 ..Default::default()
5939 },
5940 lsp::Diagnostic {
5941 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5942 severity: Some(DiagnosticSeverity::ERROR),
5943 message: "undefined variable 'BB'".to_string(),
5944 source: Some("disk".to_string()),
5945 ..Default::default()
5946 },
5947 lsp::Diagnostic {
5948 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5949 severity: Some(DiagnosticSeverity::ERROR),
5950 source: Some("disk".to_string()),
5951 message: "undefined variable 'CCC'".to_string(),
5952 ..Default::default()
5953 },
5954 ],
5955 },
5956 );
5957
5958 // The diagnostics have moved down since they were created.
5959 buffer.next_notification(cx).await;
5960 buffer.read_with(cx, |buffer, _| {
5961 assert_eq!(
5962 buffer
5963 .snapshot()
5964 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5965 .collect::<Vec<_>>(),
5966 &[
5967 DiagnosticEntry {
5968 range: Point::new(3, 9)..Point::new(3, 11),
5969 diagnostic: Diagnostic {
5970 severity: DiagnosticSeverity::ERROR,
5971 message: "undefined variable 'BB'".to_string(),
5972 is_disk_based: true,
5973 group_id: 1,
5974 is_primary: true,
5975 ..Default::default()
5976 },
5977 },
5978 DiagnosticEntry {
5979 range: Point::new(4, 9)..Point::new(4, 12),
5980 diagnostic: Diagnostic {
5981 severity: DiagnosticSeverity::ERROR,
5982 message: "undefined variable 'CCC'".to_string(),
5983 is_disk_based: true,
5984 group_id: 2,
5985 is_primary: true,
5986 ..Default::default()
5987 }
5988 }
5989 ]
5990 );
5991 assert_eq!(
5992 chunks_with_diagnostics(buffer, 0..buffer.len()),
5993 [
5994 ("\n\nfn a() { ".to_string(), None),
5995 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5996 (" }\nfn b() { ".to_string(), None),
5997 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5998 (" }\nfn c() { ".to_string(), None),
5999 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6000 (" }\n".to_string(), None),
6001 ]
6002 );
6003 assert_eq!(
6004 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6005 [
6006 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6007 (" }\nfn c() { ".to_string(), None),
6008 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6009 ]
6010 );
6011 });
6012
6013 // Ensure overlapping diagnostics are highlighted correctly.
6014 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6015 lsp::PublishDiagnosticsParams {
6016 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6017 version: Some(open_notification.text_document.version),
6018 diagnostics: vec![
6019 lsp::Diagnostic {
6020 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6021 severity: Some(DiagnosticSeverity::ERROR),
6022 message: "undefined variable 'A'".to_string(),
6023 source: Some("disk".to_string()),
6024 ..Default::default()
6025 },
6026 lsp::Diagnostic {
6027 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6028 severity: Some(DiagnosticSeverity::WARNING),
6029 message: "unreachable statement".to_string(),
6030 source: Some("disk".to_string()),
6031 ..Default::default()
6032 },
6033 ],
6034 },
6035 );
6036
6037 buffer.next_notification(cx).await;
6038 buffer.read_with(cx, |buffer, _| {
6039 assert_eq!(
6040 buffer
6041 .snapshot()
6042 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6043 .collect::<Vec<_>>(),
6044 &[
6045 DiagnosticEntry {
6046 range: Point::new(2, 9)..Point::new(2, 12),
6047 diagnostic: Diagnostic {
6048 severity: DiagnosticSeverity::WARNING,
6049 message: "unreachable statement".to_string(),
6050 is_disk_based: true,
6051 group_id: 1,
6052 is_primary: true,
6053 ..Default::default()
6054 }
6055 },
6056 DiagnosticEntry {
6057 range: Point::new(2, 9)..Point::new(2, 10),
6058 diagnostic: Diagnostic {
6059 severity: DiagnosticSeverity::ERROR,
6060 message: "undefined variable 'A'".to_string(),
6061 is_disk_based: true,
6062 group_id: 0,
6063 is_primary: true,
6064 ..Default::default()
6065 },
6066 }
6067 ]
6068 );
6069 assert_eq!(
6070 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6071 [
6072 ("fn a() { ".to_string(), None),
6073 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6074 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6075 ("\n".to_string(), None),
6076 ]
6077 );
6078 assert_eq!(
6079 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6080 [
6081 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6082 ("\n".to_string(), None),
6083 ]
6084 );
6085 });
6086
6087 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6088 // changes since the last save.
6089 buffer.update(cx, |buffer, cx| {
6090 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6091 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6092 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6093 });
6094 let change_notification_2 = fake_server
6095 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6096 .await;
6097 assert!(
6098 change_notification_2.text_document.version
6099 > change_notification_1.text_document.version
6100 );
6101
6102         // Handle diagnostics that are reported out of order
6103 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6104 lsp::PublishDiagnosticsParams {
6105 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6106 version: Some(change_notification_2.text_document.version),
6107 diagnostics: vec![
6108 lsp::Diagnostic {
6109 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6110 severity: Some(DiagnosticSeverity::ERROR),
6111 message: "undefined variable 'BB'".to_string(),
6112 source: Some("disk".to_string()),
6113 ..Default::default()
6114 },
6115 lsp::Diagnostic {
6116 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6117 severity: Some(DiagnosticSeverity::WARNING),
6118 message: "undefined variable 'A'".to_string(),
6119 source: Some("disk".to_string()),
6120 ..Default::default()
6121 },
6122 ],
6123 },
6124 );
6125
6126 buffer.next_notification(cx).await;
6127 buffer.read_with(cx, |buffer, _| {
6128 assert_eq!(
6129 buffer
6130 .snapshot()
6131 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6132 .collect::<Vec<_>>(),
6133 &[
6134 DiagnosticEntry {
6135 range: Point::new(2, 21)..Point::new(2, 22),
6136 diagnostic: Diagnostic {
6137 severity: DiagnosticSeverity::WARNING,
6138 message: "undefined variable 'A'".to_string(),
6139 is_disk_based: true,
6140 group_id: 1,
6141 is_primary: true,
6142 ..Default::default()
6143 }
6144 },
6145 DiagnosticEntry {
6146 range: Point::new(3, 9)..Point::new(3, 14),
6147 diagnostic: Diagnostic {
6148 severity: DiagnosticSeverity::ERROR,
6149 message: "undefined variable 'BB'".to_string(),
6150 is_disk_based: true,
6151 group_id: 0,
6152 is_primary: true,
6153 ..Default::default()
6154 },
6155 }
6156 ]
6157 );
6158 });
6159 }
6160
6161 #[gpui::test]
6162 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6163 cx.foreground().forbid_parking();
6164
6165 let text = concat!(
6166 "let one = ;\n", //
6167 "let two = \n",
6168 "let three = 3;\n",
6169 );
6170
6171 let fs = FakeFs::new(cx.background());
6172 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6173
6174 let project = Project::test(fs, ["/dir"], cx).await;
6175 let buffer = project
6176 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6177 .await
6178 .unwrap();
6179
6180 project.update(cx, |project, cx| {
6181 project
6182 .update_buffer_diagnostics(
6183 &buffer,
6184 vec![
6185 DiagnosticEntry {
6186 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6187 diagnostic: Diagnostic {
6188 severity: DiagnosticSeverity::ERROR,
6189 message: "syntax error 1".to_string(),
6190 ..Default::default()
6191 },
6192 },
6193 DiagnosticEntry {
6194 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6195 diagnostic: Diagnostic {
6196 severity: DiagnosticSeverity::ERROR,
6197 message: "syntax error 2".to_string(),
6198 ..Default::default()
6199 },
6200 },
6201 ],
6202 None,
6203 cx,
6204 )
6205 .unwrap();
6206 });
6207
6208 // An empty range is extended forward to include the following character.
6209 // At the end of a line, an empty range is extended backward to include
6210 // the preceding character.
6211 buffer.read_with(cx, |buffer, _| {
6212 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6213 assert_eq!(
6214 chunks
6215 .iter()
6216 .map(|(s, d)| (s.as_str(), *d))
6217 .collect::<Vec<_>>(),
6218 &[
6219 ("let one = ", None),
6220 (";", Some(DiagnosticSeverity::ERROR)),
6221 ("\nlet two =", None),
6222 (" ", Some(DiagnosticSeverity::ERROR)),
6223 ("\nlet three = 3;\n", None)
6224 ]
6225 );
6226 });
6227 }
6228
6229 #[gpui::test]
6230 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6231 cx.foreground().forbid_parking();
6232
6233 let mut language = Language::new(
6234 LanguageConfig {
6235 name: "Rust".into(),
6236 path_suffixes: vec!["rs".to_string()],
6237 ..Default::default()
6238 },
6239 Some(tree_sitter_rust::language()),
6240 );
6241 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6242
6243 let text = "
6244 fn a() {
6245 f1();
6246 }
6247 fn b() {
6248 f2();
6249 }
6250 fn c() {
6251 f3();
6252 }
6253 "
6254 .unindent();
6255
6256 let fs = FakeFs::new(cx.background());
6257 fs.insert_tree(
6258 "/dir",
6259 json!({
6260 "a.rs": text.clone(),
6261 }),
6262 )
6263 .await;
6264
6265 let project = Project::test(fs, ["/dir"], cx).await;
6266 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6267 let buffer = project
6268 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6269 .await
6270 .unwrap();
6271
6272 let mut fake_server = fake_servers.next().await.unwrap();
6273 let lsp_document_version = fake_server
6274 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6275 .await
6276 .text_document
6277 .version;
6278
6279 // Simulate editing the buffer after the language server computes some edits.
6280 buffer.update(cx, |buffer, cx| {
6281 buffer.edit(
6282 [(
6283 Point::new(0, 0)..Point::new(0, 0),
6284 "// above first function\n",
6285 )],
6286 cx,
6287 );
6288 buffer.edit(
6289 [(
6290 Point::new(2, 0)..Point::new(2, 0),
6291 " // inside first function\n",
6292 )],
6293 cx,
6294 );
6295 buffer.edit(
6296 [(
6297 Point::new(6, 4)..Point::new(6, 4),
6298 "// inside second function ",
6299 )],
6300 cx,
6301 );
6302
6303 assert_eq!(
6304 buffer.text(),
6305 "
6306 // above first function
6307 fn a() {
6308 // inside first function
6309 f1();
6310 }
6311 fn b() {
6312 // inside second function f2();
6313 }
6314 fn c() {
6315 f3();
6316 }
6317 "
6318 .unindent()
6319 );
6320 });
6321
6322 let edits = project
6323 .update(cx, |project, cx| {
6324 project.edits_from_lsp(
6325 &buffer,
6326 vec![
6327 // replace body of first function
6328 lsp::TextEdit {
6329 range: lsp::Range::new(
6330 lsp::Position::new(0, 0),
6331 lsp::Position::new(3, 0),
6332 ),
6333 new_text: "
6334 fn a() {
6335 f10();
6336 }
6337 "
6338 .unindent(),
6339 },
6340 // edit inside second function
6341 lsp::TextEdit {
6342 range: lsp::Range::new(
6343 lsp::Position::new(4, 6),
6344 lsp::Position::new(4, 6),
6345 ),
6346 new_text: "00".into(),
6347 },
6348 // edit inside third function via two distinct edits
6349 lsp::TextEdit {
6350 range: lsp::Range::new(
6351 lsp::Position::new(7, 5),
6352 lsp::Position::new(7, 5),
6353 ),
6354 new_text: "4000".into(),
6355 },
6356 lsp::TextEdit {
6357 range: lsp::Range::new(
6358 lsp::Position::new(7, 5),
6359 lsp::Position::new(7, 6),
6360 ),
6361 new_text: "".into(),
6362 },
6363 ],
6364 Some(lsp_document_version),
6365 cx,
6366 )
6367 })
6368 .await
6369 .unwrap();
6370
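        // Apply the edits; they have been translated to account for the buffer changes
        // made since `lsp_document_version`.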
6371 buffer.update(cx, |buffer, cx| {
6372 for (range, new_text) in edits {
6373 buffer.edit([(range, new_text)], cx);
6374 }
6375 assert_eq!(
6376 buffer.text(),
6377 "
6378 // above first function
6379 fn a() {
6380 // inside first function
6381 f10();
6382 }
6383 fn b() {
6384 // inside second function f200();
6385 }
6386 fn c() {
6387 f4000();
6388 }
6389 "
6390 .unindent()
6391 );
6392 });
6393 }
6394
6395 #[gpui::test]
6396 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6397 cx.foreground().forbid_parking();
6398
6399 let text = "
6400 use a::b;
6401 use a::c;
6402
6403 fn f() {
6404 b();
6405 c();
6406 }
6407 "
6408 .unindent();
6409
6410 let fs = FakeFs::new(cx.background());
6411 fs.insert_tree(
6412 "/dir",
6413 json!({
6414 "a.rs": text.clone(),
6415 }),
6416 )
6417 .await;
6418
6419 let project = Project::test(fs, ["/dir"], cx).await;
6420 let buffer = project
6421 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6422 .await
6423 .unwrap();
6424
6425 // Simulate the language server sending us a small edit in the form of a very large diff.
6426 // Rust-analyzer does this when performing a merge-imports code action.
6427 let edits = project
6428 .update(cx, |project, cx| {
6429 project.edits_from_lsp(
6430 &buffer,
6431 [
6432 // Replace the first use statement without editing the semicolon.
6433 lsp::TextEdit {
6434 range: lsp::Range::new(
6435 lsp::Position::new(0, 4),
6436 lsp::Position::new(0, 8),
6437 ),
6438 new_text: "a::{b, c}".into(),
6439 },
6440 // Reinsert the remainder of the file between the semicolon and the final
6441 // newline of the file.
6442 lsp::TextEdit {
6443 range: lsp::Range::new(
6444 lsp::Position::new(0, 9),
6445 lsp::Position::new(0, 9),
6446 ),
6447 new_text: "\n\n".into(),
6448 },
6449 lsp::TextEdit {
6450 range: lsp::Range::new(
6451 lsp::Position::new(0, 9),
6452 lsp::Position::new(0, 9),
6453 ),
6454 new_text: "
6455 fn f() {
6456 b();
6457 c();
6458 }"
6459 .unindent(),
6460 },
6461 // Delete everything after the first newline of the file.
6462 lsp::TextEdit {
6463 range: lsp::Range::new(
6464 lsp::Position::new(1, 0),
6465 lsp::Position::new(7, 0),
6466 ),
6467 new_text: "".into(),
6468 },
6469 ],
6470 None,
6471 cx,
6472 )
6473 })
6474 .await
6475 .unwrap();
6476
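        // The server's sprawling edits are minimized into two small edits before being applied.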
6477 buffer.update(cx, |buffer, cx| {
6478 let edits = edits
6479 .into_iter()
6480 .map(|(range, text)| {
6481 (
6482 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6483 text,
6484 )
6485 })
6486 .collect::<Vec<_>>();
6487
6488 assert_eq!(
6489 edits,
6490 [
6491 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6492 (Point::new(1, 0)..Point::new(2, 0), "".into())
6493 ]
6494 );
6495
6496 for (range, new_text) in edits {
6497 buffer.edit([(range, new_text)], cx);
6498 }
6499 assert_eq!(
6500 buffer.text(),
6501 "
6502 use a::{b, c};
6503
6504 fn f() {
6505 b();
6506 c();
6507 }
6508 "
6509 .unindent()
6510 );
6511 });
6512 }
6513
6514 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6515 buffer: &Buffer,
6516 range: Range<T>,
6517 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6518 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6519 for chunk in buffer.snapshot().chunks(range, true) {
6520 if chunks.last().map_or(false, |prev_chunk| {
6521 prev_chunk.1 == chunk.diagnostic_severity
6522 }) {
6523 chunks.last_mut().unwrap().0.push_str(chunk.text);
6524 } else {
6525 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6526 }
6527 }
6528 chunks
6529 }
6530
6531 #[gpui::test]
6532 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6533 let dir = temp_tree(json!({
6534 "root": {
6535 "dir1": {},
6536 "dir2": {
6537 "dir3": {}
6538 }
6539 }
6540 }));
6541
6542 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6543 let cancel_flag = Default::default();
6544 let results = project
6545 .read_with(cx, |project, cx| {
6546 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6547 })
6548 .await;
6549
6550 assert!(results.is_empty());
6551 }
6552
6553 #[gpui::test]
6554 async fn test_definition(cx: &mut gpui::TestAppContext) {
6555 let mut language = Language::new(
6556 LanguageConfig {
6557 name: "Rust".into(),
6558 path_suffixes: vec!["rs".to_string()],
6559 ..Default::default()
6560 },
6561 Some(tree_sitter_rust::language()),
6562 );
6563 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6564
6565 let fs = FakeFs::new(cx.background());
6566 fs.insert_tree(
6567 "/dir",
6568 json!({
6569 "a.rs": "const fn a() { A }",
6570 "b.rs": "const y: i32 = crate::a()",
6571 }),
6572 )
6573 .await;
6574
6575 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6576 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6577
6578 let buffer = project
6579 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6580 .await
6581 .unwrap();
6582
6583 let fake_server = fake_servers.next().await.unwrap();
6584 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6585 let params = params.text_document_position_params;
6586 assert_eq!(
6587 params.text_document.uri.to_file_path().unwrap(),
6588 Path::new("/dir/b.rs"),
6589 );
6590 assert_eq!(params.position, lsp::Position::new(0, 22));
6591
6592 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6593 lsp::Location::new(
6594 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6595 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6596 ),
6597 )))
6598 });
6599
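        // Request the definition at the reference to `a` in b.rs.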
6600 let mut definitions = project
6601 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6602 .await
6603 .unwrap();
6604
6605 assert_eq!(definitions.len(), 1);
6606 let definition = definitions.pop().unwrap();
6607 cx.update(|cx| {
6608 let target_buffer = definition.buffer.read(cx);
6609 assert_eq!(
6610 target_buffer
6611 .file()
6612 .unwrap()
6613 .as_local()
6614 .unwrap()
6615 .abs_path(cx),
6616 Path::new("/dir/a.rs"),
6617 );
6618 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6619 assert_eq!(
6620 list_worktrees(&project, cx),
6621 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6622 );
6623
6624 drop(definition);
6625 });
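        // Dropping the definition releases the invisible worktree that was created for the target file.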
6626 cx.read(|cx| {
6627 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6628 });
6629
6630 fn list_worktrees<'a>(
6631 project: &'a ModelHandle<Project>,
6632 cx: &'a AppContext,
6633 ) -> Vec<(&'a Path, bool)> {
6634 project
6635 .read(cx)
6636 .worktrees(cx)
6637 .map(|worktree| {
6638 let worktree = worktree.read(cx);
6639 (
6640 worktree.as_local().unwrap().abs_path().as_ref(),
6641 worktree.is_visible(),
6642 )
6643 })
6644 .collect::<Vec<_>>()
6645 }
6646 }
6647
6648 #[gpui::test]
6649 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6650 let mut language = Language::new(
6651 LanguageConfig {
6652 name: "TypeScript".into(),
6653 path_suffixes: vec!["ts".to_string()],
6654 ..Default::default()
6655 },
6656 Some(tree_sitter_typescript::language_typescript()),
6657 );
6658 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6659
6660 let fs = FakeFs::new(cx.background());
6661 fs.insert_tree(
6662 "/dir",
6663 json!({
6664 "a.ts": "",
6665 }),
6666 )
6667 .await;
6668
6669 let project = Project::test(fs, ["/dir"], cx).await;
6670 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6671 let buffer = project
6672 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6673 .await
6674 .unwrap();
6675
6676 let fake_server = fake_language_servers.next().await.unwrap();
6677
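        // Type a partial property name and request completions at the cursor.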
6678 let text = "let a = b.fqn";
6679 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6680 let completions = project.update(cx, |project, cx| {
6681 project.completions(&buffer, text.len(), cx)
6682 });
6683
6684 fake_server
6685 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6686 Ok(Some(lsp::CompletionResponse::Array(vec![
6687 lsp::CompletionItem {
6688 label: "fullyQualifiedName?".into(),
6689 insert_text: Some("fullyQualifiedName".into()),
6690 ..Default::default()
6691 },
6692 ])))
6693 })
6694 .next()
6695 .await;
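        // Since the server provided no edit range, the replaced range is inferred from
        // the word preceding the cursor.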
6696 let completions = completions.await.unwrap();
6697 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6698 assert_eq!(completions.len(), 1);
6699 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6700 assert_eq!(
6701 completions[0].old_range.to_offset(&snapshot),
6702 text.len() - 3..text.len()
6703 );
6704 }
6705
6706 #[gpui::test(iterations = 10)]
6707 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6708 let mut language = Language::new(
6709 LanguageConfig {
6710 name: "TypeScript".into(),
6711 path_suffixes: vec!["ts".to_string()],
6712 ..Default::default()
6713 },
6714 None,
6715 );
6716 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6717
6718 let fs = FakeFs::new(cx.background());
6719 fs.insert_tree(
6720 "/dir",
6721 json!({
6722 "a.ts": "a",
6723 }),
6724 )
6725 .await;
6726
6727 let project = Project::test(fs, ["/dir"], cx).await;
6728 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6729 let buffer = project
6730 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6731 .await
6732 .unwrap();
6733
6734 let fake_server = fake_language_servers.next().await.unwrap();
6735
6736         // The language server returns code actions that contain commands, not edits.
6737 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6738 fake_server
6739 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6740 Ok(Some(vec![
6741 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6742 title: "The code action".into(),
6743 command: Some(lsp::Command {
6744 title: "The command".into(),
6745 command: "_the/command".into(),
6746 arguments: Some(vec![json!("the-argument")]),
6747 }),
6748 ..Default::default()
6749 }),
6750 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6751 title: "two".into(),
6752 ..Default::default()
6753 }),
6754 ]))
6755 })
6756 .next()
6757 .await;
6758
6759 let action = actions.await.unwrap()[0].clone();
6760 let apply = project.update(cx, |project, cx| {
6761 project.apply_code_action(buffer.clone(), action, true, cx)
6762 });
6763
6764         // Resolving the code action does not populate its edits. In the absence of
6765         // edits, we must execute the given command.
6766 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6767 |action, _| async move { Ok(action) },
6768 );
6769
6770 // While executing the command, the language server sends the editor
6771 // a `workspaceEdit` request.
6772 fake_server
6773 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6774 let fake = fake_server.clone();
6775 move |params, _| {
6776 assert_eq!(params.command, "_the/command");
6777 let fake = fake.clone();
6778 async move {
6779 fake.server
6780 .request::<lsp::request::ApplyWorkspaceEdit>(
6781 lsp::ApplyWorkspaceEditParams {
6782 label: None,
6783 edit: lsp::WorkspaceEdit {
6784 changes: Some(
6785 [(
6786 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6787 vec![lsp::TextEdit {
6788 range: lsp::Range::new(
6789 lsp::Position::new(0, 0),
6790 lsp::Position::new(0, 0),
6791 ),
6792 new_text: "X".into(),
6793 }],
6794 )]
6795 .into_iter()
6796 .collect(),
6797 ),
6798 ..Default::default()
6799 },
6800 },
6801 )
6802 .await
6803 .unwrap();
6804 Ok(Some(json!(null)))
6805 }
6806 }
6807 })
6808 .next()
6809 .await;
6810
6811 // Applying the code action returns a project transaction containing the edits
6812 // sent by the language server in its `workspaceEdit` request.
6813 let transaction = apply.await.unwrap();
6814 assert!(transaction.0.contains_key(&buffer));
6815 buffer.update(cx, |buffer, cx| {
6816 assert_eq!(buffer.text(), "Xa");
6817 buffer.undo(cx);
6818 assert_eq!(buffer.text(), "a");
6819 });
6820 }
6821
6822 #[gpui::test]
6823 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6824 let fs = FakeFs::new(cx.background());
6825 fs.insert_tree(
6826 "/dir",
6827 json!({
6828 "file1": "the old contents",
6829 }),
6830 )
6831 .await;
6832
6833 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6834 let buffer = project
6835 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6836 .await
6837 .unwrap();
6838 buffer
6839 .update(cx, |buffer, cx| {
6840 assert_eq!(buffer.text(), "the old contents");
6841 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6842 buffer.save(cx)
6843 })
6844 .await
6845 .unwrap();
6846
6847 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6848 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6849 }
6850
6851 #[gpui::test]
6852 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6853 let fs = FakeFs::new(cx.background());
6854 fs.insert_tree(
6855 "/dir",
6856 json!({
6857 "file1": "the old contents",
6858 }),
6859 )
6860 .await;
6861
6862 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6863 let buffer = project
6864 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6865 .await
6866 .unwrap();
6867 buffer
6868 .update(cx, |buffer, cx| {
6869 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6870 buffer.save(cx)
6871 })
6872 .await
6873 .unwrap();
6874
6875 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6876 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6877 }
6878
6879 #[gpui::test]
6880 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6881 let fs = FakeFs::new(cx.background());
6882 fs.insert_tree("/dir", json!({})).await;
6883
6884 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6885 let buffer = project.update(cx, |project, cx| {
6886 project.create_buffer("", None, cx).unwrap()
6887 });
6888 buffer.update(cx, |buffer, cx| {
6889 buffer.edit([(0..0, "abc")], cx);
6890 assert!(buffer.is_dirty());
6891 assert!(!buffer.has_conflict());
6892 });
6893 project
6894 .update(cx, |project, cx| {
6895 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6896 })
6897 .await
6898 .unwrap();
6899 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6900 buffer.read_with(cx, |buffer, cx| {
6901 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6902 assert!(!buffer.is_dirty());
6903 assert!(!buffer.has_conflict());
6904 });
6905
6906 let opened_buffer = project
6907 .update(cx, |project, cx| {
6908 project.open_local_buffer("/dir/file1", cx)
6909 })
6910 .await
6911 .unwrap();
6912 assert_eq!(opened_buffer, buffer);
6913 }
6914
6915 #[gpui::test(retries = 5)]
6916 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6917 let dir = temp_tree(json!({
6918 "a": {
6919 "file1": "",
6920 "file2": "",
6921 "file3": "",
6922 },
6923 "b": {
6924 "c": {
6925 "file4": "",
6926 "file5": "",
6927 }
6928 }
6929 }));
6930
6931 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6932 let rpc = project.read_with(cx, |p, _| p.client.clone());
6933
6934 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6935 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6936 async move { buffer.await.unwrap() }
6937 };
6938 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6939 project.read_with(cx, |project, cx| {
6940 let tree = project.worktrees(cx).next().unwrap();
6941 tree.read(cx)
6942 .entry_for_path(path)
6943 .expect(&format!("no entry for path {}", path))
6944 .id
6945 })
6946 };
6947
6948 let buffer2 = buffer_for_path("a/file2", cx).await;
6949 let buffer3 = buffer_for_path("a/file3", cx).await;
6950 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6951 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6952
6953 let file2_id = id_for_path("a/file2", &cx);
6954 let file3_id = id_for_path("a/file3", &cx);
6955 let file4_id = id_for_path("b/c/file4", &cx);
6956
6957 // Create a remote copy of this worktree.
6958 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6959 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6960 let (remote, load_task) = cx.update(|cx| {
6961 Worktree::remote(
6962 1,
6963 1,
6964 initial_snapshot.to_proto(&Default::default(), true),
6965 rpc.clone(),
6966 cx,
6967 )
6968 });
6970 load_task.await;
6971
6972 cx.read(|cx| {
6973 assert!(!buffer2.read(cx).is_dirty());
6974 assert!(!buffer3.read(cx).is_dirty());
6975 assert!(!buffer4.read(cx).is_dirty());
6976 assert!(!buffer5.read(cx).is_dirty());
6977 });
6978
6979 // Rename and delete files and directories.
6980 tree.flush_fs_events(&cx).await;
6981 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6982 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6983 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6984 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6985 tree.flush_fs_events(&cx).await;
6986
6987 let expected_paths = vec![
6988 "a",
6989 "a/file1",
6990 "a/file2.new",
6991 "b",
6992 "d",
6993 "d/file3",
6994 "d/file4",
6995 ];
6996
6997 cx.read(|app| {
6998 assert_eq!(
6999 tree.read(app)
7000 .paths()
7001 .map(|p| p.to_str().unwrap())
7002 .collect::<Vec<_>>(),
7003 expected_paths
7004 );
7005
7006 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7007 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7008 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7009
7010 assert_eq!(
7011 buffer2.read(app).file().unwrap().path().as_ref(),
7012 Path::new("a/file2.new")
7013 );
7014 assert_eq!(
7015 buffer3.read(app).file().unwrap().path().as_ref(),
7016 Path::new("d/file3")
7017 );
7018 assert_eq!(
7019 buffer4.read(app).file().unwrap().path().as_ref(),
7020 Path::new("d/file4")
7021 );
7022 assert_eq!(
7023 buffer5.read(app).file().unwrap().path().as_ref(),
7024 Path::new("b/c/file5")
7025 );
7026
7027 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7028 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7029 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7030 assert!(buffer5.read(app).file().unwrap().is_deleted());
7031 });
7032
7033 // Update the remote worktree. Check that it becomes consistent with the
7034 // local worktree.
7035 remote.update(cx, |remote, cx| {
7036 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7037 &initial_snapshot,
7038 1,
7039 1,
7040 true,
7041 );
7042 remote
7043 .as_remote_mut()
7044 .unwrap()
7045 .snapshot
7046 .apply_remote_update(update_message)
7047 .unwrap();
7048
7049 assert_eq!(
7050 remote
7051 .paths()
7052 .map(|p| p.to_str().unwrap())
7053 .collect::<Vec<_>>(),
7054 expected_paths
7055 );
7056 });
7057 }
7058
7059 #[gpui::test]
7060 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7061 let fs = FakeFs::new(cx.background());
7062 fs.insert_tree(
7063 "/dir",
7064 json!({
7065 "a.txt": "a-contents",
7066 "b.txt": "b-contents",
7067 }),
7068 )
7069 .await;
7070
7071 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7072
7073 // Spawn multiple tasks to open paths, repeating some paths.
7074 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7075 (
7076 p.open_local_buffer("/dir/a.txt", cx),
7077 p.open_local_buffer("/dir/b.txt", cx),
7078 p.open_local_buffer("/dir/a.txt", cx),
7079 )
7080 });
7081
7082 let buffer_a_1 = buffer_a_1.await.unwrap();
7083 let buffer_a_2 = buffer_a_2.await.unwrap();
7084 let buffer_b = buffer_b.await.unwrap();
7085 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7086 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7087
7088 // There is only one buffer per path.
7089 let buffer_a_id = buffer_a_1.id();
7090 assert_eq!(buffer_a_2.id(), buffer_a_id);
7091
7092 // Open the same path again while it is still open.
7093 drop(buffer_a_1);
7094 let buffer_a_3 = project
7095 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7096 .await
7097 .unwrap();
7098
7099 // There's still only one buffer per path.
7100 assert_eq!(buffer_a_3.id(), buffer_a_id);
7101 }
7102
7103 #[gpui::test]
7104 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7105 let fs = FakeFs::new(cx.background());
7106 fs.insert_tree(
7107 "/dir",
7108 json!({
7109 "file1": "abc",
7110 "file2": "def",
7111 "file3": "ghi",
7112 }),
7113 )
7114 .await;
7115
7116 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7117
7118 let buffer1 = project
7119 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7120 .await
7121 .unwrap();
7122 let events = Rc::new(RefCell::new(Vec::new()));
7123
7124 // initially, the buffer isn't dirty.
7125 buffer1.update(cx, |buffer, cx| {
7126 cx.subscribe(&buffer1, {
7127 let events = events.clone();
7128 move |_, _, event, _| match event {
7129 BufferEvent::Operation(_) => {}
7130 _ => events.borrow_mut().push(event.clone()),
7131 }
7132 })
7133 .detach();
7134
7135 assert!(!buffer.is_dirty());
7136 assert!(events.borrow().is_empty());
7137
7138 buffer.edit([(1..2, "")], cx);
7139 });
7140
7141 // after the first edit, the buffer is dirty, and emits a dirtied event.
7142 buffer1.update(cx, |buffer, cx| {
7143 assert!(buffer.text() == "ac");
7144 assert!(buffer.is_dirty());
7145 assert_eq!(
7146 *events.borrow(),
7147 &[language::Event::Edited, language::Event::Dirtied]
7148 );
7149 events.borrow_mut().clear();
7150 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7151 });
7152
7153 // after saving, the buffer is not dirty, and emits a saved event.
7154 buffer1.update(cx, |buffer, cx| {
7155 assert!(!buffer.is_dirty());
7156 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7157 events.borrow_mut().clear();
7158
7159 buffer.edit([(1..1, "B")], cx);
7160 buffer.edit([(2..2, "D")], cx);
7161 });
7162
7163 // after editing again, the buffer is dirty, and emits another dirty event.
7164 buffer1.update(cx, |buffer, cx| {
7165 assert!(buffer.text() == "aBDc");
7166 assert!(buffer.is_dirty());
7167 assert_eq!(
7168 *events.borrow(),
7169 &[
7170 language::Event::Edited,
7171 language::Event::Dirtied,
7172 language::Event::Edited,
7173 ],
7174 );
7175 events.borrow_mut().clear();
7176
7177             // TODO - currently, after restoring the buffer to its
7178             // previously-saved state, the buffer is still considered dirty.
7179 buffer.edit([(1..3, "")], cx);
7180 assert!(buffer.text() == "ac");
7181 assert!(buffer.is_dirty());
7182 });
7183
7184 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7185
7186 // When a file is deleted, the buffer is considered dirty.
7187 let events = Rc::new(RefCell::new(Vec::new()));
7188 let buffer2 = project
7189 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7190 .await
7191 .unwrap();
7192 buffer2.update(cx, |_, cx| {
7193 cx.subscribe(&buffer2, {
7194 let events = events.clone();
7195 move |_, _, event, _| events.borrow_mut().push(event.clone())
7196 })
7197 .detach();
7198 });
7199
7200 fs.remove_file("/dir/file2".as_ref(), Default::default())
7201 .await
7202 .unwrap();
7203 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7204 assert_eq!(
7205 *events.borrow(),
7206 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7207 );
7208
7209 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7210 let events = Rc::new(RefCell::new(Vec::new()));
7211 let buffer3 = project
7212 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7213 .await
7214 .unwrap();
7215 buffer3.update(cx, |_, cx| {
7216 cx.subscribe(&buffer3, {
7217 let events = events.clone();
7218 move |_, _, event, _| events.borrow_mut().push(event.clone())
7219 })
7220 .detach();
7221 });
7222
7223 buffer3.update(cx, |buffer, cx| {
7224 buffer.edit([(0..0, "x")], cx);
7225 });
7226 events.borrow_mut().clear();
7227 fs.remove_file("/dir/file3".as_ref(), Default::default())
7228 .await
7229 .unwrap();
7230 buffer3
7231 .condition(&cx, |_, _| !events.borrow().is_empty())
7232 .await;
7233 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7234 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7235 }
7236
7237 #[gpui::test]
7238 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7239 let initial_contents = "aaa\nbbbbb\nc\n";
7240 let fs = FakeFs::new(cx.background());
7241 fs.insert_tree(
7242 "/dir",
7243 json!({
7244 "the-file": initial_contents,
7245 }),
7246 )
7247 .await;
7248 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7249 let buffer = project
7250 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7251 .await
7252 .unwrap();
7253
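        // Place an anchor on each line so we can check how positions are preserved across the reload.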
7254 let anchors = (0..3)
7255 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7256 .collect::<Vec<_>>();
7257
7258 // Change the file on disk, adding two new lines of text, and removing
7259 // one line.
7260 buffer.read_with(cx, |buffer, _| {
7261 assert!(!buffer.is_dirty());
7262 assert!(!buffer.has_conflict());
7263 });
7264 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7265 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7266 .await
7267 .unwrap();
7268
7269 // Because the buffer was not modified, it is reloaded from disk. Its
7270 // contents are edited according to the diff between the old and new
7271 // file contents.
7272 buffer
7273 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7274 .await;
7275
7276 buffer.update(cx, |buffer, _| {
7277 assert_eq!(buffer.text(), new_contents);
7278 assert!(!buffer.is_dirty());
7279 assert!(!buffer.has_conflict());
7280
7281 let anchor_positions = anchors
7282 .iter()
7283 .map(|anchor| anchor.to_point(&*buffer))
7284 .collect::<Vec<_>>();
7285 assert_eq!(
7286 anchor_positions,
7287 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7288 );
7289 });
7290
7291 // Modify the buffer
7292 buffer.update(cx, |buffer, cx| {
7293 buffer.edit([(0..0, " ")], cx);
7294 assert!(buffer.is_dirty());
7295 assert!(!buffer.has_conflict());
7296 });
7297
7298 // Change the file on disk again, adding blank lines to the beginning.
7299 fs.save(
7300 "/dir/the-file".as_ref(),
7301 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7302 )
7303 .await
7304 .unwrap();
7305
7306 // Because the buffer is modified, it doesn't reload from disk, but is
7307 // marked as having a conflict.
7308 buffer
7309 .condition(&cx, |buffer, _| buffer.has_conflict())
7310 .await;
7311 }
7312
7313 #[gpui::test]
7314 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7315 cx.foreground().forbid_parking();
7316
7317 let fs = FakeFs::new(cx.background());
7318 fs.insert_tree(
7319 "/the-dir",
7320 json!({
7321 "a.rs": "
7322 fn foo(mut v: Vec<usize>) {
7323 for x in &v {
7324 v.push(1);
7325 }
7326 }
7327 "
7328 .unindent(),
7329 }),
7330 )
7331 .await;
7332
7333 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7334 let buffer = project
7335 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7336 .await
7337 .unwrap();
7338
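        // Build a diagnostics message in which hint diagnostics point back to their
        // primary diagnostics via related information.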
7339 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7340 let message = lsp::PublishDiagnosticsParams {
7341 uri: buffer_uri.clone(),
7342 diagnostics: vec![
7343 lsp::Diagnostic {
7344 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7345 severity: Some(DiagnosticSeverity::WARNING),
7346 message: "error 1".to_string(),
7347 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7348 location: lsp::Location {
7349 uri: buffer_uri.clone(),
7350 range: lsp::Range::new(
7351 lsp::Position::new(1, 8),
7352 lsp::Position::new(1, 9),
7353 ),
7354 },
7355 message: "error 1 hint 1".to_string(),
7356 }]),
7357 ..Default::default()
7358 },
7359 lsp::Diagnostic {
7360 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7361 severity: Some(DiagnosticSeverity::HINT),
7362 message: "error 1 hint 1".to_string(),
7363 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7364 location: lsp::Location {
7365 uri: buffer_uri.clone(),
7366 range: lsp::Range::new(
7367 lsp::Position::new(1, 8),
7368 lsp::Position::new(1, 9),
7369 ),
7370 },
7371 message: "original diagnostic".to_string(),
7372 }]),
7373 ..Default::default()
7374 },
7375 lsp::Diagnostic {
7376 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7377 severity: Some(DiagnosticSeverity::ERROR),
7378 message: "error 2".to_string(),
7379 related_information: Some(vec![
7380 lsp::DiagnosticRelatedInformation {
7381 location: lsp::Location {
7382 uri: buffer_uri.clone(),
7383 range: lsp::Range::new(
7384 lsp::Position::new(1, 13),
7385 lsp::Position::new(1, 15),
7386 ),
7387 },
7388 message: "error 2 hint 1".to_string(),
7389 },
7390 lsp::DiagnosticRelatedInformation {
7391 location: lsp::Location {
7392 uri: buffer_uri.clone(),
7393 range: lsp::Range::new(
7394 lsp::Position::new(1, 13),
7395 lsp::Position::new(1, 15),
7396 ),
7397 },
7398 message: "error 2 hint 2".to_string(),
7399 },
7400 ]),
7401 ..Default::default()
7402 },
7403 lsp::Diagnostic {
7404 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7405 severity: Some(DiagnosticSeverity::HINT),
7406 message: "error 2 hint 1".to_string(),
7407 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7408 location: lsp::Location {
7409 uri: buffer_uri.clone(),
7410 range: lsp::Range::new(
7411 lsp::Position::new(2, 8),
7412 lsp::Position::new(2, 17),
7413 ),
7414 },
7415 message: "original diagnostic".to_string(),
7416 }]),
7417 ..Default::default()
7418 },
7419 lsp::Diagnostic {
7420 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7421 severity: Some(DiagnosticSeverity::HINT),
7422 message: "error 2 hint 2".to_string(),
7423 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7424 location: lsp::Location {
7425 uri: buffer_uri.clone(),
7426 range: lsp::Range::new(
7427 lsp::Position::new(2, 8),
7428 lsp::Position::new(2, 17),
7429 ),
7430 },
7431 message: "original diagnostic".to_string(),
7432 }]),
7433 ..Default::default()
7434 },
7435 ],
7436 version: None,
7437 };
7438
7439 project
7440 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7441 .unwrap();
7442 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7443
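        // Related diagnostics are grouped: "error 1" and its hint share group 0,
        // while "error 2" and its hints share group 1.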
7444 assert_eq!(
7445 buffer
7446 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7447 .collect::<Vec<_>>(),
7448 &[
7449 DiagnosticEntry {
7450 range: Point::new(1, 8)..Point::new(1, 9),
7451 diagnostic: Diagnostic {
7452 severity: DiagnosticSeverity::WARNING,
7453 message: "error 1".to_string(),
7454 group_id: 0,
7455 is_primary: true,
7456 ..Default::default()
7457 }
7458 },
7459 DiagnosticEntry {
7460 range: Point::new(1, 8)..Point::new(1, 9),
7461 diagnostic: Diagnostic {
7462 severity: DiagnosticSeverity::HINT,
7463 message: "error 1 hint 1".to_string(),
7464 group_id: 0,
7465 is_primary: false,
7466 ..Default::default()
7467 }
7468 },
7469 DiagnosticEntry {
7470 range: Point::new(1, 13)..Point::new(1, 15),
7471 diagnostic: Diagnostic {
7472 severity: DiagnosticSeverity::HINT,
7473 message: "error 2 hint 1".to_string(),
7474 group_id: 1,
7475 is_primary: false,
7476 ..Default::default()
7477 }
7478 },
7479 DiagnosticEntry {
7480 range: Point::new(1, 13)..Point::new(1, 15),
7481 diagnostic: Diagnostic {
7482 severity: DiagnosticSeverity::HINT,
7483 message: "error 2 hint 2".to_string(),
7484 group_id: 1,
7485 is_primary: false,
7486 ..Default::default()
7487 }
7488 },
7489 DiagnosticEntry {
7490 range: Point::new(2, 8)..Point::new(2, 17),
7491 diagnostic: Diagnostic {
7492 severity: DiagnosticSeverity::ERROR,
7493 message: "error 2".to_string(),
7494 group_id: 1,
7495 is_primary: true,
7496 ..Default::default()
7497 }
7498 }
7499 ]
7500 );
7501
7502 assert_eq!(
7503 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7504 &[
7505 DiagnosticEntry {
7506 range: Point::new(1, 8)..Point::new(1, 9),
7507 diagnostic: Diagnostic {
7508 severity: DiagnosticSeverity::WARNING,
7509 message: "error 1".to_string(),
7510 group_id: 0,
7511 is_primary: true,
7512 ..Default::default()
7513 }
7514 },
7515 DiagnosticEntry {
7516 range: Point::new(1, 8)..Point::new(1, 9),
7517 diagnostic: Diagnostic {
7518 severity: DiagnosticSeverity::HINT,
7519 message: "error 1 hint 1".to_string(),
7520 group_id: 0,
7521 is_primary: false,
7522 ..Default::default()
7523 }
7524 },
7525 ]
7526 );
7527 assert_eq!(
7528 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7529 &[
7530 DiagnosticEntry {
7531 range: Point::new(1, 13)..Point::new(1, 15),
7532 diagnostic: Diagnostic {
7533 severity: DiagnosticSeverity::HINT,
7534 message: "error 2 hint 1".to_string(),
7535 group_id: 1,
7536 is_primary: false,
7537 ..Default::default()
7538 }
7539 },
7540 DiagnosticEntry {
7541 range: Point::new(1, 13)..Point::new(1, 15),
7542 diagnostic: Diagnostic {
7543 severity: DiagnosticSeverity::HINT,
7544 message: "error 2 hint 2".to_string(),
7545 group_id: 1,
7546 is_primary: false,
7547 ..Default::default()
7548 }
7549 },
7550 DiagnosticEntry {
7551 range: Point::new(2, 8)..Point::new(2, 17),
7552 diagnostic: Diagnostic {
7553 severity: DiagnosticSeverity::ERROR,
7554 message: "error 2".to_string(),
7555 group_id: 1,
7556 is_primary: true,
7557 ..Default::default()
7558 }
7559 }
7560 ]
7561 );
7562 }
7563
7564 #[gpui::test]
7565 async fn test_rename(cx: &mut gpui::TestAppContext) {
7566 cx.foreground().forbid_parking();
7567
7568 let mut language = Language::new(
7569 LanguageConfig {
7570 name: "Rust".into(),
7571 path_suffixes: vec!["rs".to_string()],
7572 ..Default::default()
7573 },
7574 Some(tree_sitter_rust::language()),
7575 );
7576 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7577 capabilities: lsp::ServerCapabilities {
7578 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7579 prepare_provider: Some(true),
7580 work_done_progress_options: Default::default(),
7581 })),
7582 ..Default::default()
7583 },
7584 ..Default::default()
7585 });
7586
7587 let fs = FakeFs::new(cx.background());
7588 fs.insert_tree(
7589 "/dir",
7590 json!({
7591 "one.rs": "const ONE: usize = 1;",
7592 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7593 }),
7594 )
7595 .await;
7596
7597 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7598 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7599 let buffer = project
7600 .update(cx, |project, cx| {
7601 project.open_local_buffer("/dir/one.rs", cx)
7602 })
7603 .await
7604 .unwrap();
7605
7606 let fake_server = fake_servers.next().await.unwrap();
7607
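        // Prepare the rename to determine the range of the symbol at the cursor.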
7608 let response = project.update(cx, |project, cx| {
7609 project.prepare_rename(buffer.clone(), 7, cx)
7610 });
7611 fake_server
7612 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7613 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7614 assert_eq!(params.position, lsp::Position::new(0, 7));
7615 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7616 lsp::Position::new(0, 6),
7617 lsp::Position::new(0, 9),
7618 ))))
7619 })
7620 .next()
7621 .await
7622 .unwrap();
7623 let range = response.await.unwrap().unwrap();
7624 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7625 assert_eq!(range, 6..9);
7626
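        // Perform the rename, which edits the symbol's definition and all of its references.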
7627 let response = project.update(cx, |project, cx| {
7628 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7629 });
7630 fake_server
7631 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7632 assert_eq!(
7633 params.text_document_position.text_document.uri.as_str(),
7634 "file:///dir/one.rs"
7635 );
7636 assert_eq!(
7637 params.text_document_position.position,
7638 lsp::Position::new(0, 7)
7639 );
7640 assert_eq!(params.new_name, "THREE");
7641 Ok(Some(lsp::WorkspaceEdit {
7642 changes: Some(
7643 [
7644 (
7645 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7646 vec![lsp::TextEdit::new(
7647 lsp::Range::new(
7648 lsp::Position::new(0, 6),
7649 lsp::Position::new(0, 9),
7650 ),
7651 "THREE".to_string(),
7652 )],
7653 ),
7654 (
7655 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7656 vec![
7657 lsp::TextEdit::new(
7658 lsp::Range::new(
7659 lsp::Position::new(0, 24),
7660 lsp::Position::new(0, 27),
7661 ),
7662 "THREE".to_string(),
7663 ),
7664 lsp::TextEdit::new(
7665 lsp::Range::new(
7666 lsp::Position::new(0, 35),
7667 lsp::Position::new(0, 38),
7668 ),
7669 "THREE".to_string(),
7670 ),
7671 ],
7672 ),
7673 ]
7674 .into_iter()
7675 .collect(),
7676 ),
7677 ..Default::default()
7678 }))
7679 })
7680 .next()
7681 .await
7682 .unwrap();
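        // The resulting project transaction contains an entry for each edited buffer.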
7683 let mut transaction = response.await.unwrap().0;
7684 assert_eq!(transaction.len(), 2);
7685 assert_eq!(
7686 transaction
7687 .remove_entry(&buffer)
7688 .unwrap()
7689 .0
7690 .read_with(cx, |buffer, _| buffer.text()),
7691 "const THREE: usize = 1;"
7692 );
7693 assert_eq!(
7694 transaction
7695 .into_keys()
7696 .next()
7697 .unwrap()
7698 .read_with(cx, |buffer, _| buffer.text()),
7699 "const TWO: usize = one::THREE + one::THREE;"
7700 );
7701 }
7702
7703 #[gpui::test]
7704 async fn test_search(cx: &mut gpui::TestAppContext) {
7705 let fs = FakeFs::new(cx.background());
7706 fs.insert_tree(
7707 "/dir",
7708 json!({
7709 "one.rs": "const ONE: usize = 1;",
7710 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7711 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7712 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7713 }),
7714 )
7715 .await;
7716 let project = Project::test(fs.clone(), ["/dir"], cx).await;
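        // The search runs against the files on disk, since no buffers have been opened yet.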
7717 assert_eq!(
7718 search(&project, SearchQuery::text("TWO", false, true), cx)
7719 .await
7720 .unwrap(),
7721 HashMap::from_iter([
7722 ("two.rs".to_string(), vec![6..9]),
7723 ("three.rs".to_string(), vec![37..40])
7724 ])
7725 );
7726
7727 let buffer_4 = project
7728 .update(cx, |project, cx| {
7729 project.open_local_buffer("/dir/four.rs", cx)
7730 })
7731 .await
7732 .unwrap();
7733 buffer_4.update(cx, |buffer, cx| {
7734 let text = "two::TWO";
7735 buffer.edit([(20..28, text), (31..43, text)], cx);
7736 });
7737
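        // After editing an open buffer, the search reflects its unsaved contents.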
7738 assert_eq!(
7739 search(&project, SearchQuery::text("TWO", false, true), cx)
7740 .await
7741 .unwrap(),
7742 HashMap::from_iter([
7743 ("two.rs".to_string(), vec![6..9]),
7744 ("three.rs".to_string(), vec![37..40]),
7745 ("four.rs".to_string(), vec![25..28, 36..39])
7746 ])
7747 );
7748
7749 async fn search(
7750 project: &ModelHandle<Project>,
7751 query: SearchQuery,
7752 cx: &mut gpui::TestAppContext,
7753 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7754 let results = project
7755 .update(cx, |project, cx| project.search(query, cx))
7756 .await?;
7757
7758 Ok(results
7759 .into_iter()
7760 .map(|(buffer, ranges)| {
7761 buffer.read_with(cx, |buffer, _| {
7762 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7763 let ranges = ranges
7764 .into_iter()
7765 .map(|range| range.to_offset(buffer))
7766 .collect::<Vec<_>>();
7767 (path, ranges)
7768 })
7769 })
7770 .collect())
7771 }
7772 }
7773}