1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use settings::Settings;
32use sha2::{Digest, Sha256};
33use similar::{ChangeTag, TextDiff};
34use std::{
35 cell::RefCell,
36 cmp::{self, Ordering},
37 convert::TryInto,
38 hash::Hash,
39 mem,
40 ops::Range,
41 path::{Component, Path, PathBuf},
42 rc::Rc,
43 sync::{
44 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
45 Arc,
46 },
47 time::Instant,
48};
49use util::{post_inc, ResultExt, TryFutureExt as _};
50
51pub use fs::*;
52pub use worktree::*;
53
54pub trait Item: Entity {
55 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
56}
57
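/// The in-memory representation of an open project: its worktrees, open buffers,
/// running language servers, and (when collaborating) its remote id and collaborators.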
58pub struct Project {
59 worktrees: Vec<WorktreeHandle>,
60 active_entry: Option<ProjectEntryId>,
61 languages: Arc<LanguageRegistry>,
62 language_servers:
63 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
64 started_language_servers:
65 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
66 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
67 language_server_settings: Arc<Mutex<serde_json::Value>>,
68 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
69 next_language_server_id: usize,
70 client: Arc<client::Client>,
71 next_entry_id: Arc<AtomicUsize>,
72 user_store: ModelHandle<UserStore>,
73 fs: Arc<dyn Fs>,
74 client_state: ProjectClientState,
75 collaborators: HashMap<PeerId, Collaborator>,
76 subscriptions: Vec<client::Subscription>,
77 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
78 shared_buffers: HashMap<PeerId, HashSet<u64>>,
79 loading_buffers: HashMap<
80 ProjectPath,
81 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
82 >,
83 loading_local_worktrees:
84 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
85 opened_buffers: HashMap<u64, OpenBuffer>,
86 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
87 nonce: u128,
88}
89
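/// How the project holds an open buffer: strongly while the project is shared or
/// remote, weakly otherwise, or as a queue of operations received while the buffer
/// is still loading.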
90enum OpenBuffer {
91 Strong(ModelHandle<Buffer>),
92 Weak(WeakModelHandle<Buffer>),
93 Loading(Vec<Operation>),
94}
95
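/// A worktree handle that is held strongly while the project is shared and
/// downgraded to a weak handle when sharing stops, unless the worktree is visible.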
96enum WorktreeHandle {
97 Strong(ModelHandle<Worktree>),
98 Weak(WeakModelHandle<Worktree>),
99}
100
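/// Whether this project is hosted locally (and possibly shared with collaborators)
/// or joined remotely from another peer.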
101enum ProjectClientState {
102 Local {
103 is_shared: bool,
104 remote_id_tx: watch::Sender<Option<u64>>,
105 remote_id_rx: watch::Receiver<Option<u64>>,
106 _maintain_remote_id_task: Task<Option<()>>,
107 },
108 Remote {
109 sharing_has_stopped: bool,
110 remote_id: u64,
111 replica_id: ReplicaId,
112 _detect_unshare_task: Task<Option<()>>,
113 },
114}
115
116#[derive(Clone, Debug)]
117pub struct Collaborator {
118 pub user: Arc<User>,
119 pub peer_id: PeerId,
120 pub replica_id: ReplicaId,
121}
122
123#[derive(Clone, Debug, PartialEq)]
124pub enum Event {
125 ActiveEntryChanged(Option<ProjectEntryId>),
126 WorktreeRemoved(WorktreeId),
127 DiskBasedDiagnosticsStarted,
128 DiskBasedDiagnosticsUpdated,
129 DiskBasedDiagnosticsFinished,
130 DiagnosticsUpdated(ProjectPath),
131 RemoteIdChanged(Option<u64>),
132 CollaboratorLeft(PeerId),
133}
134
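/// The reported state of a language server: its name, any in-progress work, and the
/// number of outstanding disk-based diagnostic updates.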
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
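/// A path to an entry, relative to the root of its worktree.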
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_server_name: LanguageServerName,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
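/// Buffer edits grouped by the buffer they apply to, e.g. the result of applying a
/// workspace edit sent by a language server.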
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
225#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
226pub struct ProjectEntryId(usize);
227
228impl ProjectEntryId {
229 pub fn new(counter: &AtomicUsize) -> Self {
230 Self(counter.fetch_add(1, SeqCst))
231 }
232
233 pub fn from_proto(id: u64) -> Self {
234 Self(id as usize)
235 }
236
237 pub fn to_proto(&self) -> u64 {
238 self.0 as u64
239 }
240
241 pub fn to_usize(&self) -> usize {
242 self.0
243 }
244}
245
246impl Project {
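    /// Registers the handlers for the RPC messages and requests that drive remote
    /// collaboration on projects.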
247 pub fn init(client: &Arc<Client>) {
248 client.add_model_message_handler(Self::handle_add_collaborator);
249 client.add_model_message_handler(Self::handle_buffer_reloaded);
250 client.add_model_message_handler(Self::handle_buffer_saved);
251 client.add_model_message_handler(Self::handle_start_language_server);
252 client.add_model_message_handler(Self::handle_update_language_server);
253 client.add_model_message_handler(Self::handle_remove_collaborator);
254 client.add_model_message_handler(Self::handle_register_worktree);
255 client.add_model_message_handler(Self::handle_unregister_worktree);
256 client.add_model_message_handler(Self::handle_unshare_project);
257 client.add_model_message_handler(Self::handle_update_buffer_file);
258 client.add_model_message_handler(Self::handle_update_buffer);
259 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
260 client.add_model_message_handler(Self::handle_update_worktree);
261 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
262 client.add_model_request_handler(Self::handle_apply_code_action);
263 client.add_model_request_handler(Self::handle_reload_buffers);
264 client.add_model_request_handler(Self::handle_format_buffers);
265 client.add_model_request_handler(Self::handle_get_code_actions);
266 client.add_model_request_handler(Self::handle_get_completions);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
272 client.add_model_request_handler(Self::handle_search_project);
273 client.add_model_request_handler(Self::handle_get_project_symbols);
274 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
275 client.add_model_request_handler(Self::handle_open_buffer_by_id);
276 client.add_model_request_handler(Self::handle_open_buffer_by_path);
277 client.add_model_request_handler(Self::handle_save_buffer);
278 }
279
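    /// Creates a project for the local filesystem, registering it with the server
    /// whenever the client is connected and unregistering it on disconnect.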
280 pub fn local(
281 client: Arc<Client>,
282 user_store: ModelHandle<UserStore>,
283 languages: Arc<LanguageRegistry>,
284 fs: Arc<dyn Fs>,
285 cx: &mut MutableAppContext,
286 ) -> ModelHandle<Self> {
287 cx.add_model(|cx: &mut ModelContext<Self>| {
288 let (remote_id_tx, remote_id_rx) = watch::channel();
289 let _maintain_remote_id_task = cx.spawn_weak({
290 let rpc = client.clone();
291 move |this, mut cx| {
292 async move {
293 let mut status = rpc.status();
294 while let Some(status) = status.next().await {
295 if let Some(this) = this.upgrade(&cx) {
296 if status.is_connected() {
297 this.update(&mut cx, |this, cx| this.register(cx)).await?;
298 } else {
299 this.update(&mut cx, |this, cx| this.unregister(cx));
300 }
301 }
302 }
303 Ok(())
304 }
305 .log_err()
306 }
307 });
308
309 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
310 Self {
311 worktrees: Default::default(),
312 collaborators: Default::default(),
313 opened_buffers: Default::default(),
314 shared_buffers: Default::default(),
315 loading_buffers: Default::default(),
316 loading_local_worktrees: Default::default(),
317 buffer_snapshots: Default::default(),
318 client_state: ProjectClientState::Local {
319 is_shared: false,
320 remote_id_tx,
321 remote_id_rx,
322 _maintain_remote_id_task,
323 },
324 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
325 subscriptions: Vec::new(),
326 active_entry: None,
327 languages,
328 client,
329 user_store,
330 fs,
331 next_entry_id: Default::default(),
332 language_servers: Default::default(),
333 started_language_servers: Default::default(),
334 language_server_statuses: Default::default(),
335 last_workspace_edits_by_language_server: Default::default(),
336 language_server_settings: Default::default(),
337 next_language_server_id: 0,
338 nonce: StdRng::from_entropy().gen(),
339 }
340 })
341 }
342
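    /// Joins a project shared by another peer, loading its worktrees, collaborators,
    /// and language server statuses from the server's response.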
343 pub async fn remote(
344 remote_id: u64,
345 client: Arc<Client>,
346 user_store: ModelHandle<UserStore>,
347 languages: Arc<LanguageRegistry>,
348 fs: Arc<dyn Fs>,
349 cx: &mut AsyncAppContext,
350 ) -> Result<ModelHandle<Self>> {
351 client.authenticate_and_connect(true, &cx).await?;
352
353 let response = client
354 .request(proto::JoinProject {
355 project_id: remote_id,
356 })
357 .await?;
358
359 let replica_id = response.replica_id as ReplicaId;
360
361 let mut worktrees = Vec::new();
362 for worktree in response.worktrees {
363 let (worktree, load_task) = cx
364 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
365 worktrees.push(worktree);
366 load_task.detach();
367 }
368
369 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
370 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
371 let mut this = Self {
372 worktrees: Vec::new(),
373 loading_buffers: Default::default(),
374 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
375 shared_buffers: Default::default(),
376 loading_local_worktrees: Default::default(),
377 active_entry: None,
378 collaborators: Default::default(),
379 languages,
380 user_store: user_store.clone(),
381 fs,
382 next_entry_id: Default::default(),
383 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
384 client: client.clone(),
385 client_state: ProjectClientState::Remote {
386 sharing_has_stopped: false,
387 remote_id,
388 replica_id,
389 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
390 async move {
391 let mut status = client.status();
392 let is_connected =
393 status.next().await.map_or(false, |s| s.is_connected());
394 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
395 if !is_connected || status.next().await.is_some() {
396 if let Some(this) = this.upgrade(&cx) {
397 this.update(&mut cx, |this, cx| this.project_unshared(cx))
398 }
399 }
400 Ok(())
401 }
402 .log_err()
403 }),
404 },
405 language_servers: Default::default(),
406 started_language_servers: Default::default(),
407 language_server_settings: Default::default(),
408 language_server_statuses: response
409 .language_servers
410 .into_iter()
411 .map(|server| {
412 (
413 server.id as usize,
414 LanguageServerStatus {
415 name: server.name,
416 pending_work: Default::default(),
417 pending_diagnostic_updates: 0,
418 },
419 )
420 })
421 .collect(),
422 last_workspace_edits_by_language_server: Default::default(),
423 next_language_server_id: 0,
424 opened_buffers: Default::default(),
425 buffer_snapshots: Default::default(),
426 nonce: StdRng::from_entropy().gen(),
427 };
428 for worktree in worktrees {
429 this.add_worktree(&worktree, cx);
430 }
431 this
432 });
433
434 let user_ids = response
435 .collaborators
436 .iter()
437 .map(|peer| peer.user_id)
438 .collect();
439 user_store
440 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
441 .await?;
442 let mut collaborators = HashMap::default();
443 for message in response.collaborators {
444 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
445 collaborators.insert(collaborator.peer_id, collaborator);
446 }
447
448 this.update(cx, |this, _| {
449 this.collaborators = collaborators;
450 });
451
452 Ok(this)
453 }
454
455 #[cfg(any(test, feature = "test-support"))]
456 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
457 let languages = Arc::new(LanguageRegistry::test());
458 let http_client = client::test::FakeHttpClient::with_404_response();
459 let client = client::Client::new(http_client.clone());
460 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
461 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
462 }
463
464 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
465 self.opened_buffers
466 .get(&remote_id)
467 .and_then(|buffer| buffer.upgrade(cx))
468 }
469
470 #[cfg(any(test, feature = "test-support"))]
471 pub fn languages(&self) -> &Arc<LanguageRegistry> {
472 &self.languages
473 }
474
475 #[cfg(any(test, feature = "test-support"))]
476 pub fn check_invariants(&self, cx: &AppContext) {
477 if self.is_local() {
478 let mut worktree_root_paths = HashMap::default();
479 for worktree in self.worktrees(cx) {
480 let worktree = worktree.read(cx);
481 let abs_path = worktree.as_local().unwrap().abs_path().clone();
482 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
483 assert_eq!(
484 prev_worktree_id,
485 None,
486 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
487 abs_path,
488 worktree.id(),
489 prev_worktree_id
490 )
491 }
492 } else {
493 let replica_id = self.replica_id();
494 for buffer in self.opened_buffers.values() {
495 if let Some(buffer) = buffer.upgrade(cx) {
496 let buffer = buffer.read(cx);
497 assert_eq!(
498 buffer.deferred_ops_len(),
499 0,
500 "replica {}, buffer {} has deferred operations",
501 replica_id,
502 buffer.remote_id()
503 );
504 }
505 }
506 }
507 }
508
509 #[cfg(any(test, feature = "test-support"))]
510 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
511 let path = path.into();
512 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
513 self.opened_buffers.iter().any(|(_, buffer)| {
514 if let Some(buffer) = buffer.upgrade(cx) {
515 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
516 if file.worktree == worktree && file.path() == &path.path {
517 return true;
518 }
519 }
520 }
521 false
522 })
523 } else {
524 false
525 }
526 }
527
528 pub fn fs(&self) -> &Arc<dyn Fs> {
529 &self.fs
530 }
531
532 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
533 self.unshare(cx);
534 for worktree in &self.worktrees {
535 if let Some(worktree) = worktree.upgrade(cx) {
536 worktree.update(cx, |worktree, _| {
537 worktree.as_local_mut().unwrap().unregister();
538 });
539 }
540 }
541
542 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
543 *remote_id_tx.borrow_mut() = None;
544 }
545
546 self.subscriptions.clear();
547 }
548
549 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
550 self.unregister(cx);
551
552 let response = self.client.request(proto::RegisterProject {});
553 cx.spawn(|this, mut cx| async move {
554 let remote_id = response.await?.project_id;
555
556 let mut registrations = Vec::new();
557 this.update(&mut cx, |this, cx| {
558 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
559 *remote_id_tx.borrow_mut() = Some(remote_id);
560 }
561
562 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
563
564 this.subscriptions
565 .push(this.client.add_model_for_remote_entity(remote_id, cx));
566
567 for worktree in &this.worktrees {
568 if let Some(worktree) = worktree.upgrade(cx) {
569 registrations.push(worktree.update(cx, |worktree, cx| {
570 let worktree = worktree.as_local_mut().unwrap();
571 worktree.register(remote_id, cx)
572 }));
573 }
574 }
575 });
576
577 futures::future::try_join_all(registrations).await?;
578 Ok(())
579 })
580 }
581
582 pub fn remote_id(&self) -> Option<u64> {
583 match &self.client_state {
584 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
585 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
586 }
587 }
588
589 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
590 let mut id = None;
591 let mut watch = None;
592 match &self.client_state {
593 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
594 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
595 }
596
597 async move {
598 if let Some(id) = id {
599 return id;
600 }
601 let mut watch = watch.unwrap();
602 loop {
603 let id = *watch.borrow();
604 if let Some(id) = id {
605 return id;
606 }
607 watch.next().await;
608 }
609 }
610 }
611
612 pub fn replica_id(&self) -> ReplicaId {
613 match &self.client_state {
614 ProjectClientState::Local { .. } => 0,
615 ProjectClientState::Remote { replica_id, .. } => *replica_id,
616 }
617 }
618
619 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
620 &self.collaborators
621 }
622
623 pub fn worktrees<'a>(
624 &'a self,
625 cx: &'a AppContext,
626 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
627 self.worktrees
628 .iter()
629 .filter_map(move |worktree| worktree.upgrade(cx))
630 }
631
632 pub fn visible_worktrees<'a>(
633 &'a self,
634 cx: &'a AppContext,
635 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
636 self.worktrees.iter().filter_map(|worktree| {
637 worktree.upgrade(cx).and_then(|worktree| {
638 if worktree.read(cx).is_visible() {
639 Some(worktree)
640 } else {
641 None
642 }
643 })
644 })
645 }
646
647 pub fn worktree_for_id(
648 &self,
649 id: WorktreeId,
650 cx: &AppContext,
651 ) -> Option<ModelHandle<Worktree>> {
652 self.worktrees(cx)
653 .find(|worktree| worktree.read(cx).id() == id)
654 }
655
656 pub fn worktree_for_entry(
657 &self,
658 entry_id: ProjectEntryId,
659 cx: &AppContext,
660 ) -> Option<ModelHandle<Worktree>> {
661 self.worktrees(cx)
662 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
663 }
664
665 pub fn worktree_id_for_entry(
666 &self,
667 entry_id: ProjectEntryId,
668 cx: &AppContext,
669 ) -> Option<WorktreeId> {
670 self.worktree_for_entry(entry_id, cx)
671 .map(|worktree| worktree.read(cx).id())
672 }
673
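    /// Shares a local project with collaborators, upgrading its buffer and worktree
    /// handles to strong handles and sharing each worktree with the server.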
674 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
675 let rpc = self.client.clone();
676 cx.spawn(|this, mut cx| async move {
677 let project_id = this.update(&mut cx, |this, cx| {
678 if let ProjectClientState::Local {
679 is_shared,
680 remote_id_rx,
681 ..
682 } = &mut this.client_state
683 {
684 *is_shared = true;
685
686 for open_buffer in this.opened_buffers.values_mut() {
687 match open_buffer {
688 OpenBuffer::Strong(_) => {}
689 OpenBuffer::Weak(buffer) => {
690 if let Some(buffer) = buffer.upgrade(cx) {
691 *open_buffer = OpenBuffer::Strong(buffer);
692 }
693 }
694 OpenBuffer::Loading(_) => unreachable!(),
695 }
696 }
697
698 for worktree_handle in this.worktrees.iter_mut() {
699 match worktree_handle {
700 WorktreeHandle::Strong(_) => {}
701 WorktreeHandle::Weak(worktree) => {
702 if let Some(worktree) = worktree.upgrade(cx) {
703 *worktree_handle = WorktreeHandle::Strong(worktree);
704 }
705 }
706 }
707 }
708
709 remote_id_rx
710 .borrow()
711 .ok_or_else(|| anyhow!("no project id"))
712 } else {
713 Err(anyhow!("can't share a remote project"))
714 }
715 })?;
716
717 rpc.request(proto::ShareProject { project_id }).await?;
718
719 let mut tasks = Vec::new();
720 this.update(&mut cx, |this, cx| {
721 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
722 worktree.update(cx, |worktree, cx| {
723 let worktree = worktree.as_local_mut().unwrap();
724 tasks.push(worktree.share(project_id, cx));
725 });
726 }
727 });
728 for task in tasks {
729 task.await?;
730 }
731 this.update(&mut cx, |_, cx| cx.notify());
732 Ok(())
733 })
734 }
735
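    /// Stops sharing a local project, clearing its collaborators and downgrading
    /// buffer and worktree handles so they can be released when no longer used.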
736 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
737 let rpc = self.client.clone();
738
739 if let ProjectClientState::Local {
740 is_shared,
741 remote_id_rx,
742 ..
743 } = &mut self.client_state
744 {
745 if !*is_shared {
746 return;
747 }
748
749 *is_shared = false;
750 self.collaborators.clear();
751 self.shared_buffers.clear();
752 for worktree_handle in self.worktrees.iter_mut() {
753 if let WorktreeHandle::Strong(worktree) = worktree_handle {
754 let is_visible = worktree.update(cx, |worktree, _| {
755 worktree.as_local_mut().unwrap().unshare();
756 worktree.is_visible()
757 });
758 if !is_visible {
759 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
760 }
761 }
762 }
763
764 for open_buffer in self.opened_buffers.values_mut() {
765 match open_buffer {
766 OpenBuffer::Strong(buffer) => {
767 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
768 }
769 _ => {}
770 }
771 }
772
773 if let Some(project_id) = *remote_id_rx.borrow() {
774 rpc.send(proto::UnshareProject { project_id }).log_err();
775 }
776
777 cx.notify();
778 } else {
779 log::error!("attempted to unshare a remote project");
780 }
781 }
782
783 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
784 if let ProjectClientState::Remote {
785 sharing_has_stopped,
786 ..
787 } = &mut self.client_state
788 {
789 *sharing_has_stopped = true;
790 self.collaborators.clear();
791 cx.notify();
792 }
793 }
794
795 pub fn is_read_only(&self) -> bool {
796 match &self.client_state {
797 ProjectClientState::Local { .. } => false,
798 ProjectClientState::Remote {
799 sharing_has_stopped,
800 ..
801 } => *sharing_has_stopped,
802 }
803 }
804
805 pub fn is_local(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => true,
808 ProjectClientState::Remote { .. } => false,
809 }
810 }
811
812 pub fn is_remote(&self) -> bool {
813 !self.is_local()
814 }
815
816 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
817 if self.is_remote() {
818 return Err(anyhow!("creating buffers as a guest is not supported yet"));
819 }
820
821 let buffer = cx.add_model(|cx| {
822 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
823 });
824 self.register_buffer(&buffer, cx)?;
825 Ok(buffer)
826 }
827
828 pub fn open_path(
829 &mut self,
830 path: impl Into<ProjectPath>,
831 cx: &mut ModelContext<Self>,
832 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
833 let task = self.open_buffer(path, cx);
834 cx.spawn_weak(|_, cx| async move {
835 let buffer = task.await?;
836 let project_entry_id = buffer
837 .read_with(&cx, |buffer, cx| {
838 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
839 })
840 .ok_or_else(|| anyhow!("no project entry"))?;
841 Ok((project_entry_id, buffer.into()))
842 })
843 }
844
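    /// Opens the buffer at the given project path, reusing an existing open buffer
    /// or an in-flight load for the same path when one exists.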
845 pub fn open_buffer(
846 &mut self,
847 path: impl Into<ProjectPath>,
848 cx: &mut ModelContext<Self>,
849 ) -> Task<Result<ModelHandle<Buffer>>> {
850 let project_path = path.into();
851 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
852 worktree
853 } else {
854 return Task::ready(Err(anyhow!("no such worktree")));
855 };
856
857 // If there is already a buffer for the given path, then return it.
858 let existing_buffer = self.get_open_buffer(&project_path, cx);
859 if let Some(existing_buffer) = existing_buffer {
860 return Task::ready(Ok(existing_buffer));
861 }
862
863 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
864 // If the given path is already being loaded, then wait for that existing
865 // task to complete and return the same buffer.
866 hash_map::Entry::Occupied(e) => e.get().clone(),
867
868 // Otherwise, record the fact that this path is now being loaded.
869 hash_map::Entry::Vacant(entry) => {
870 let (mut tx, rx) = postage::watch::channel();
871 entry.insert(rx.clone());
872
873 let load_buffer = if worktree.read(cx).is_local() {
874 self.open_local_buffer(&project_path.path, &worktree, cx)
875 } else {
876 self.open_remote_buffer(&project_path.path, &worktree, cx)
877 };
878
879 cx.spawn(move |this, mut cx| async move {
880 let load_result = load_buffer.await;
881 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
882 // Record the fact that the buffer is no longer loading.
883 this.loading_buffers.remove(&project_path);
884 let buffer = load_result.map_err(Arc::new)?;
885 Ok(buffer)
886 }));
887 })
888 .detach();
889 rx
890 }
891 };
892
893 cx.foreground().spawn(async move {
894 loop {
895 if let Some(result) = loading_watch.borrow().as_ref() {
896 match result {
897 Ok(buffer) => return Ok(buffer.clone()),
898 Err(error) => return Err(anyhow!("{}", error)),
899 }
900 }
901 loading_watch.next().await;
902 }
903 })
904 }
905
906 fn open_local_buffer(
907 &mut self,
908 path: &Arc<Path>,
909 worktree: &ModelHandle<Worktree>,
910 cx: &mut ModelContext<Self>,
911 ) -> Task<Result<ModelHandle<Buffer>>> {
912 let load_buffer = worktree.update(cx, |worktree, cx| {
913 let worktree = worktree.as_local_mut().unwrap();
914 worktree.load_buffer(path, cx)
915 });
916 cx.spawn(|this, mut cx| async move {
917 let buffer = load_buffer.await?;
918 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
919 Ok(buffer)
920 })
921 }
922
923 fn open_remote_buffer(
924 &mut self,
925 path: &Arc<Path>,
926 worktree: &ModelHandle<Worktree>,
927 cx: &mut ModelContext<Self>,
928 ) -> Task<Result<ModelHandle<Buffer>>> {
929 let rpc = self.client.clone();
930 let project_id = self.remote_id().unwrap();
931 let remote_worktree_id = worktree.read(cx).id();
932 let path = path.clone();
933 let path_string = path.to_string_lossy().to_string();
934 cx.spawn(|this, mut cx| async move {
935 let response = rpc
936 .request(proto::OpenBufferByPath {
937 project_id,
938 worktree_id: remote_worktree_id.to_proto(),
939 path: path_string,
940 })
941 .await?;
942 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
943 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
944 .await
945 })
946 }
947
948 fn open_local_buffer_via_lsp(
949 &mut self,
950 abs_path: lsp::Url,
951 lsp_adapter: Arc<dyn LspAdapter>,
952 lsp_server: Arc<LanguageServer>,
953 cx: &mut ModelContext<Self>,
954 ) -> Task<Result<ModelHandle<Buffer>>> {
955 cx.spawn(|this, mut cx| async move {
956 let abs_path = abs_path
957 .to_file_path()
958 .map_err(|_| anyhow!("can't convert URI to path"))?;
959 let (worktree, relative_path) = if let Some(result) =
960 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
961 {
962 result
963 } else {
964 let worktree = this
965 .update(&mut cx, |this, cx| {
966 this.create_local_worktree(&abs_path, false, cx)
967 })
968 .await?;
969 this.update(&mut cx, |this, cx| {
970 this.language_servers.insert(
971 (worktree.read(cx).id(), lsp_adapter.name()),
972 (lsp_adapter, lsp_server),
973 );
974 });
975 (worktree, PathBuf::new())
976 };
977
978 let project_path = ProjectPath {
979 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
980 path: relative_path.into(),
981 };
982 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
983 .await
984 })
985 }
986
987 pub fn open_buffer_by_id(
988 &mut self,
989 id: u64,
990 cx: &mut ModelContext<Self>,
991 ) -> Task<Result<ModelHandle<Buffer>>> {
992 if let Some(buffer) = self.buffer_for_id(id, cx) {
993 Task::ready(Ok(buffer))
994 } else if self.is_local() {
995 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
996 } else if let Some(project_id) = self.remote_id() {
997 let request = self
998 .client
999 .request(proto::OpenBufferById { project_id, id });
1000 cx.spawn(|this, mut cx| async move {
1001 let buffer = request
1002 .await?
1003 .buffer
1004 .ok_or_else(|| anyhow!("invalid buffer"))?;
1005 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1006 .await
1007 })
1008 } else {
1009 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1010 }
1011 }
1012
1013 pub fn save_buffer_as(
1014 &mut self,
1015 buffer: ModelHandle<Buffer>,
1016 abs_path: PathBuf,
1017 cx: &mut ModelContext<Project>,
1018 ) -> Task<Result<()>> {
1019 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1020 let old_path =
1021 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1022 cx.spawn(|this, mut cx| async move {
1023 if let Some(old_path) = old_path {
1024 this.update(&mut cx, |this, cx| {
1025 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1026 });
1027 }
1028 let (worktree, path) = worktree_task.await?;
1029 worktree
1030 .update(&mut cx, |worktree, cx| {
1031 worktree
1032 .as_local_mut()
1033 .unwrap()
1034 .save_buffer_as(buffer.clone(), path, cx)
1035 })
1036 .await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.assign_language_to_buffer(&buffer, cx);
1039 this.register_buffer_with_language_server(&buffer, cx);
1040 });
1041 Ok(())
1042 })
1043 }
1044
1045 pub fn get_open_buffer(
1046 &mut self,
1047 path: &ProjectPath,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Option<ModelHandle<Buffer>> {
1050 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1051 self.opened_buffers.values().find_map(|buffer| {
1052 let buffer = buffer.upgrade(cx)?;
1053 let file = File::from_dyn(buffer.read(cx).file())?;
1054 if file.worktree == worktree && file.path() == &path.path {
1055 Some(buffer)
1056 } else {
1057 None
1058 }
1059 })
1060 }
1061
1062 fn register_buffer(
1063 &mut self,
1064 buffer: &ModelHandle<Buffer>,
1065 cx: &mut ModelContext<Self>,
1066 ) -> Result<()> {
1067 let remote_id = buffer.read(cx).remote_id();
1068 let open_buffer = if self.is_remote() || self.is_shared() {
1069 OpenBuffer::Strong(buffer.clone())
1070 } else {
1071 OpenBuffer::Weak(buffer.downgrade())
1072 };
1073
1074 match self.opened_buffers.insert(remote_id, open_buffer) {
1075 None => {}
1076 Some(OpenBuffer::Loading(operations)) => {
1077 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1078 }
1079 Some(OpenBuffer::Weak(existing_handle)) => {
1080 if existing_handle.upgrade(cx).is_some() {
1081 Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?
1085 }
1086 }
1087 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?,
1091 }
1092 cx.subscribe(buffer, |this, buffer, event, cx| {
1093 this.on_buffer_event(buffer, event, cx);
1094 })
1095 .detach();
1096
1097 self.assign_language_to_buffer(buffer, cx);
1098 self.register_buffer_with_language_server(buffer, cx);
1099 cx.observe_release(buffer, |this, buffer, cx| {
1100 if let Some(file) = File::from_dyn(buffer.file()) {
1101 if file.is_local() {
1102 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1103 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1104 server
1105 .notify::<lsp::notification::DidCloseTextDocument>(
1106 lsp::DidCloseTextDocumentParams {
1107 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1108 },
1109 )
1110 .log_err();
1111 }
1112 }
1113 }
1114 })
1115 .detach();
1116
1117 Ok(())
1118 }
1119
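    /// Notifies the buffer's language server that the buffer was opened and records
    /// the snapshot the server now knows about, so later edits can be sent as deltas.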
1120 fn register_buffer_with_language_server(
1121 &mut self,
1122 buffer_handle: &ModelHandle<Buffer>,
1123 cx: &mut ModelContext<Self>,
1124 ) {
1125 let buffer = buffer_handle.read(cx);
1126 let buffer_id = buffer.remote_id();
1127 if let Some(file) = File::from_dyn(buffer.file()) {
1128 if file.is_local() {
1129 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1130 let initial_snapshot = buffer.text_snapshot();
1131 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1132
1133 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1134 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1135 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1136 .log_err();
1137 }
1138 }
1139
1140 if let Some((_, server)) = language_server {
1141 server
1142 .notify::<lsp::notification::DidOpenTextDocument>(
1143 lsp::DidOpenTextDocumentParams {
1144 text_document: lsp::TextDocumentItem::new(
1145 uri,
1146 Default::default(),
1147 0,
1148 initial_snapshot.text(),
1149 ),
1150 }
1151 .clone(),
1152 )
1153 .log_err();
1154 buffer_handle.update(cx, |buffer, cx| {
1155 buffer.set_completion_triggers(
1156 server
1157 .capabilities()
1158 .completion_provider
1159 .as_ref()
1160 .and_then(|provider| provider.trigger_characters.clone())
1161 .unwrap_or(Vec::new()),
1162 cx,
1163 )
1164 });
1165 self.buffer_snapshots
1166 .insert(buffer_id, vec![(0, initial_snapshot)]);
1167 }
1168 }
1169 }
1170 }
1171
1172 fn unregister_buffer_from_language_server(
1173 &mut self,
1174 buffer: &ModelHandle<Buffer>,
1175 old_path: PathBuf,
1176 cx: &mut ModelContext<Self>,
1177 ) {
1178 buffer.update(cx, |buffer, cx| {
1179 buffer.update_diagnostics(Default::default(), cx);
1180 self.buffer_snapshots.remove(&buffer.remote_id());
1181 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1182 language_server
1183 .notify::<lsp::notification::DidCloseTextDocument>(
1184 lsp::DidCloseTextDocumentParams {
1185 text_document: lsp::TextDocumentIdentifier::new(
1186 lsp::Url::from_file_path(old_path).unwrap(),
1187 ),
1188 },
1189 )
1190 .log_err();
1191 }
1192 });
1193 }
1194
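    /// Reacts to buffer events: operations are forwarded to collaborators, and edits
    /// and saves are relayed to the buffer's language server.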
1195 fn on_buffer_event(
1196 &mut self,
1197 buffer: ModelHandle<Buffer>,
1198 event: &BufferEvent,
1199 cx: &mut ModelContext<Self>,
1200 ) -> Option<()> {
1201 match event {
1202 BufferEvent::Operation(operation) => {
1203 let project_id = self.remote_id()?;
1204 let request = self.client.request(proto::UpdateBuffer {
1205 project_id,
1206 buffer_id: buffer.read(cx).remote_id(),
1207 operations: vec![language::proto::serialize_operation(&operation)],
1208 });
1209 cx.background().spawn(request).detach_and_log_err(cx);
1210 }
1211 BufferEvent::Edited { .. } => {
1212 let (_, language_server) = self
1213 .language_server_for_buffer(buffer.read(cx), cx)?
1214 .clone();
1215 let buffer = buffer.read(cx);
1216 let file = File::from_dyn(buffer.file())?;
1217 let abs_path = file.as_local()?.abs_path(cx);
1218 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1219 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1220 let (version, prev_snapshot) = buffer_snapshots.last()?;
1221 let next_snapshot = buffer.text_snapshot();
1222 let next_version = version + 1;
1223
1224 let content_changes = buffer
1225 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1226 .map(|edit| {
1227 let edit_start = edit.new.start.0;
1228 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1229 let new_text = next_snapshot
1230 .text_for_range(edit.new.start.1..edit.new.end.1)
1231 .collect();
1232 lsp::TextDocumentContentChangeEvent {
1233 range: Some(lsp::Range::new(
1234 point_to_lsp(edit_start),
1235 point_to_lsp(edit_end),
1236 )),
1237 range_length: None,
1238 text: new_text,
1239 }
1240 })
1241 .collect();
1242
1243 buffer_snapshots.push((next_version, next_snapshot));
1244
1245 language_server
1246 .notify::<lsp::notification::DidChangeTextDocument>(
1247 lsp::DidChangeTextDocumentParams {
1248 text_document: lsp::VersionedTextDocumentIdentifier::new(
1249 uri,
1250 next_version,
1251 ),
1252 content_changes,
1253 },
1254 )
1255 .log_err();
1256 }
1257 BufferEvent::Saved => {
1258 let file = File::from_dyn(buffer.read(cx).file())?;
1259 let worktree_id = file.worktree_id(cx);
1260 let abs_path = file.as_local()?.abs_path(cx);
1261 let text_document = lsp::TextDocumentIdentifier {
1262 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1263 };
1264
1265 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1266 server
1267 .notify::<lsp::notification::DidSaveTextDocument>(
1268 lsp::DidSaveTextDocumentParams {
1269 text_document: text_document.clone(),
1270 text: None,
1271 },
1272 )
1273 .log_err();
1274 }
1275 }
1276 _ => {}
1277 }
1278
1279 None
1280 }
1281
1282 fn language_servers_for_worktree(
1283 &self,
1284 worktree_id: WorktreeId,
1285 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1286 self.language_servers.iter().filter_map(
1287 move |((language_server_worktree_id, _), server)| {
1288 if *language_server_worktree_id == worktree_id {
1289 Some(server)
1290 } else {
1291 None
1292 }
1293 },
1294 )
1295 }
1296
1297 fn assign_language_to_buffer(
1298 &mut self,
1299 buffer: &ModelHandle<Buffer>,
1300 cx: &mut ModelContext<Self>,
1301 ) -> Option<()> {
1302 // If the buffer has a language, set it and start the language server if we haven't already.
1303 let full_path = buffer.read(cx).file()?.full_path(cx);
1304 let language = self.languages.select_language(&full_path)?;
1305 buffer.update(cx, |buffer, cx| {
1306 buffer.set_language(Some(language.clone()), cx);
1307 });
1308
1309 let file = File::from_dyn(buffer.read(cx).file())?;
1310 let worktree = file.worktree.read(cx).as_local()?;
1311 let worktree_id = worktree.id();
1312 let worktree_abs_path = worktree.abs_path().clone();
1313 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1314
1315 None
1316 }
1317
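    /// Starts a language server for the given language and worktree, at most once per
    /// (worktree, adapter) pair, and wires up its notification and request handlers.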
1318 fn start_language_server(
1319 &mut self,
1320 worktree_id: WorktreeId,
1321 worktree_path: Arc<Path>,
1322 language: Arc<Language>,
1323 cx: &mut ModelContext<Self>,
1324 ) {
1325 let adapter = if let Some(adapter) = language.lsp_adapter() {
1326 adapter
1327 } else {
1328 return;
1329 };
1330 let key = (worktree_id, adapter.name());
1331 self.started_language_servers
1332 .entry(key.clone())
1333 .or_insert_with(|| {
1334 let server_id = post_inc(&mut self.next_language_server_id);
1335 let language_server = self.languages.start_language_server(
1336 server_id,
1337 language.clone(),
1338 worktree_path,
1339 self.client.http_client(),
1340 cx,
1341 );
1342 cx.spawn_weak(|this, mut cx| async move {
1343 let language_server = language_server?.await.log_err()?;
1344 let language_server = language_server
1345 .initialize(adapter.initialization_options())
1346 .await
1347 .log_err()?;
1348 let this = this.upgrade(&cx)?;
1349 let disk_based_diagnostics_progress_token =
1350 adapter.disk_based_diagnostics_progress_token();
1351
1352 language_server
1353 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1354 let this = this.downgrade();
1355 let adapter = adapter.clone();
1356 move |params, mut cx| {
1357 if let Some(this) = this.upgrade(&cx) {
1358 this.update(&mut cx, |this, cx| {
1359 this.on_lsp_diagnostics_published(
1360 server_id,
1361 params,
1362 &adapter,
1363 disk_based_diagnostics_progress_token,
1364 cx,
1365 );
1366 });
1367 }
1368 }
1369 })
1370 .detach();
1371
1372 language_server
1373 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1374 let settings = this
1375 .read_with(&cx, |this, _| this.language_server_settings.clone());
1376 move |params, _| {
1377 let settings = settings.lock().clone();
1378 async move {
1379 Ok(params
1380 .items
1381 .into_iter()
1382 .map(|item| {
1383 if let Some(section) = &item.section {
1384 settings
1385 .get(section)
1386 .cloned()
1387 .unwrap_or(serde_json::Value::Null)
1388 } else {
1389 settings.clone()
1390 }
1391 })
1392 .collect())
1393 }
1394 }
1395 })
1396 .detach();
1397
1398 language_server
1399 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1400 let this = this.downgrade();
1401 let adapter = adapter.clone();
1402 let language_server = language_server.clone();
1403 move |params, cx| {
1404 Self::on_lsp_workspace_edit(
1405 this,
1406 params,
1407 server_id,
1408 adapter.clone(),
1409 language_server.clone(),
1410 cx,
1411 )
1412 }
1413 })
1414 .detach();
1415
1416 language_server
1417 .on_notification::<lsp::notification::Progress, _>({
1418 let this = this.downgrade();
1419 move |params, mut cx| {
1420 if let Some(this) = this.upgrade(&cx) {
1421 this.update(&mut cx, |this, cx| {
1422 this.on_lsp_progress(
1423 params,
1424 server_id,
1425 disk_based_diagnostics_progress_token,
1426 cx,
1427 );
1428 });
1429 }
1430 }
1431 })
1432 .detach();
1433
1434 this.update(&mut cx, |this, cx| {
1435 this.language_servers
1436 .insert(key.clone(), (adapter, language_server.clone()));
1437 this.language_server_statuses.insert(
1438 server_id,
1439 LanguageServerStatus {
1440 name: language_server.name().to_string(),
1441 pending_work: Default::default(),
1442 pending_diagnostic_updates: 0,
1443 },
1444 );
1445 language_server
1446 .notify::<lsp::notification::DidChangeConfiguration>(
1447 lsp::DidChangeConfigurationParams {
1448 settings: this.language_server_settings.lock().clone(),
1449 },
1450 )
1451 .ok();
1452
1453 if let Some(project_id) = this.remote_id() {
1454 this.client
1455 .send(proto::StartLanguageServer {
1456 project_id,
1457 server: Some(proto::LanguageServer {
1458 id: server_id as u64,
1459 name: language_server.name().to_string(),
1460 }),
1461 })
1462 .log_err();
1463 }
1464
1465 // Tell the language server about every open buffer in the worktree that matches the language.
1466 for buffer in this.opened_buffers.values() {
1467 if let Some(buffer_handle) = buffer.upgrade(cx) {
1468 let buffer = buffer_handle.read(cx);
1469 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1470 file
1471 } else {
1472 continue;
1473 };
1474 let language = if let Some(language) = buffer.language() {
1475 language
1476 } else {
1477 continue;
1478 };
1479 if file.worktree.read(cx).id() != key.0
1480 || language.lsp_adapter().map(|a| a.name())
1481 != Some(key.1.clone())
1482 {
1483 continue;
1484 }
1485
1486 let file = file.as_local()?;
1487 let versions = this
1488 .buffer_snapshots
1489 .entry(buffer.remote_id())
1490 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1491 let (version, initial_snapshot) = versions.last().unwrap();
1492 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1493 language_server
1494 .notify::<lsp::notification::DidOpenTextDocument>(
1495 lsp::DidOpenTextDocumentParams {
1496 text_document: lsp::TextDocumentItem::new(
1497 uri,
1498 Default::default(),
1499 *version,
1500 initial_snapshot.text(),
1501 ),
1502 },
1503 )
1504 .log_err()?;
1505 buffer_handle.update(cx, |buffer, cx| {
1506 buffer.set_completion_triggers(
1507 language_server
1508 .capabilities()
1509 .completion_provider
1510 .as_ref()
1511 .and_then(|provider| {
1512 provider.trigger_characters.clone()
1513 })
1514 .unwrap_or(Vec::new()),
1515 cx,
1516 )
1517 });
1518 }
1519 }
1520
1521 cx.notify();
1522 Some(())
1523 });
1524
1525 Some(language_server)
1526 })
1527 });
1528 }
1529
1530 pub fn restart_language_servers_for_buffers(
1531 &mut self,
1532 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1533 cx: &mut ModelContext<Self>,
1534 ) -> Option<()> {
1535 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1536 .into_iter()
1537 .filter_map(|buffer| {
1538 let file = File::from_dyn(buffer.read(cx).file())?;
1539 let worktree = file.worktree.read(cx).as_local()?;
1540 let worktree_id = worktree.id();
1541 let worktree_abs_path = worktree.abs_path().clone();
1542 let full_path = file.full_path(cx);
1543 Some((worktree_id, worktree_abs_path, full_path))
1544 })
1545 .collect();
1546 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1547 let language = self.languages.select_language(&full_path)?;
1548 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1549 }
1550
1551 None
1552 }
1553
1554 fn restart_language_server(
1555 &mut self,
1556 worktree_id: WorktreeId,
1557 worktree_path: Arc<Path>,
1558 language: Arc<Language>,
1559 cx: &mut ModelContext<Self>,
1560 ) {
1561 let adapter = if let Some(adapter) = language.lsp_adapter() {
1562 adapter
1563 } else {
1564 return;
1565 };
1566 let key = (worktree_id, adapter.name());
1567 let server_to_shutdown = self.language_servers.remove(&key);
1568 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1572 cx.spawn_weak(|this, mut cx| async move {
1573 if let Some(this) = this.upgrade(&cx) {
1574 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1575 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1576 shutdown_task.await;
1577 }
1578 }
1579
1580 this.update(&mut cx, |this, cx| {
1581 this.start_language_server(worktree_id, worktree_path, language, cx);
1582 });
1583 }
1584 })
1585 .detach();
1586 }
1587
1588 fn on_lsp_diagnostics_published(
1589 &mut self,
1590 server_id: usize,
1591 mut params: lsp::PublishDiagnosticsParams,
1592 adapter: &Arc<dyn LspAdapter>,
1593 disk_based_diagnostics_progress_token: Option<&str>,
1594 cx: &mut ModelContext<Self>,
1595 ) {
1596 adapter.process_diagnostics(&mut params);
1597 if disk_based_diagnostics_progress_token.is_none() {
1598 self.disk_based_diagnostics_started(cx);
1599 self.broadcast_language_server_update(
1600 server_id,
1601 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1602 proto::LspDiskBasedDiagnosticsUpdating {},
1603 ),
1604 );
1605 }
1606 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1607 .log_err();
1608 if disk_based_diagnostics_progress_token.is_none() {
1609 self.disk_based_diagnostics_finished(cx);
1610 self.broadcast_language_server_update(
1611 server_id,
1612 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1613 proto::LspDiskBasedDiagnosticsUpdated {},
1614 ),
1615 );
1616 }
1617 }
1618
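    /// Handles `$/progress` notifications from a language server, distinguishing
    /// disk-based diagnostic updates from other long-running work.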
1619 fn on_lsp_progress(
1620 &mut self,
1621 progress: lsp::ProgressParams,
1622 server_id: usize,
1623 disk_based_diagnostics_progress_token: Option<&str>,
1624 cx: &mut ModelContext<Self>,
1625 ) {
1626 let token = match progress.token {
1627 lsp::NumberOrString::String(token) => token,
1628 lsp::NumberOrString::Number(token) => {
1629 log::info!("skipping numeric progress token {}", token);
1630 return;
1631 }
1632 };
1633 let progress = match progress.value {
1634 lsp::ProgressParamsValue::WorkDone(value) => value,
1635 };
1636 let language_server_status =
1637 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1638 status
1639 } else {
1640 return;
1641 };
1642 match progress {
1643 lsp::WorkDoneProgress::Begin(_) => {
1644 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1645 language_server_status.pending_diagnostic_updates += 1;
1646 if language_server_status.pending_diagnostic_updates == 1 {
1647 self.disk_based_diagnostics_started(cx);
1648 self.broadcast_language_server_update(
1649 server_id,
1650 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1651 proto::LspDiskBasedDiagnosticsUpdating {},
1652 ),
1653 );
1654 }
1655 } else {
1656 self.on_lsp_work_start(server_id, token.clone(), cx);
1657 self.broadcast_language_server_update(
1658 server_id,
1659 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1660 token,
1661 }),
1662 );
1663 }
1664 }
1665 lsp::WorkDoneProgress::Report(report) => {
1666 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1667 self.on_lsp_work_progress(
1668 server_id,
1669 token.clone(),
1670 LanguageServerProgress {
1671 message: report.message.clone(),
1672 percentage: report.percentage.map(|p| p as usize),
1673 last_update_at: Instant::now(),
1674 },
1675 cx,
1676 );
1677 self.broadcast_language_server_update(
1678 server_id,
1679 proto::update_language_server::Variant::WorkProgress(
1680 proto::LspWorkProgress {
1681 token,
1682 message: report.message,
1683 percentage: report.percentage.map(|p| p as u32),
1684 },
1685 ),
1686 );
1687 }
1688 }
1689 lsp::WorkDoneProgress::End(_) => {
1690 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1691 language_server_status.pending_diagnostic_updates -= 1;
1692 if language_server_status.pending_diagnostic_updates == 0 {
1693 self.disk_based_diagnostics_finished(cx);
1694 self.broadcast_language_server_update(
1695 server_id,
1696 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1697 proto::LspDiskBasedDiagnosticsUpdated {},
1698 ),
1699 );
1700 }
1701 } else {
1702 self.on_lsp_work_end(server_id, token.clone(), cx);
1703 self.broadcast_language_server_update(
1704 server_id,
1705 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1706 token,
1707 }),
1708 );
1709 }
1710 }
1711 }
1712 }
1713
1714 fn on_lsp_work_start(
1715 &mut self,
1716 language_server_id: usize,
1717 token: String,
1718 cx: &mut ModelContext<Self>,
1719 ) {
1720 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1721 status.pending_work.insert(
1722 token,
1723 LanguageServerProgress {
1724 message: None,
1725 percentage: None,
1726 last_update_at: Instant::now(),
1727 },
1728 );
1729 cx.notify();
1730 }
1731 }
1732
1733 fn on_lsp_work_progress(
1734 &mut self,
1735 language_server_id: usize,
1736 token: String,
1737 progress: LanguageServerProgress,
1738 cx: &mut ModelContext<Self>,
1739 ) {
1740 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1741 status.pending_work.insert(token, progress);
1742 cx.notify();
1743 }
1744 }
1745
1746 fn on_lsp_work_end(
1747 &mut self,
1748 language_server_id: usize,
1749 token: String,
1750 cx: &mut ModelContext<Self>,
1751 ) {
1752 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1753 status.pending_work.remove(&token);
1754 cx.notify();
1755 }
1756 }
1757
1758 async fn on_lsp_workspace_edit(
1759 this: WeakModelHandle<Self>,
1760 params: lsp::ApplyWorkspaceEditParams,
1761 server_id: usize,
1762 adapter: Arc<dyn LspAdapter>,
1763 language_server: Arc<LanguageServer>,
1764 mut cx: AsyncAppContext,
1765 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1766 let this = this
1767 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1769 let transaction = Self::deserialize_workspace_edit(
1770 this.clone(),
1771 params.edit,
1772 true,
1773 adapter.clone(),
1774 language_server.clone(),
1775 &mut cx,
1776 )
1777 .await
1778 .log_err();
1779 this.update(&mut cx, |this, _| {
1780 if let Some(transaction) = transaction {
1781 this.last_workspace_edits_by_language_server
1782 .insert(server_id, transaction);
1783 }
1784 });
1785 Ok(lsp::ApplyWorkspaceEditResponse {
1786 applied: true,
1787 failed_change: None,
1788 failure_reason: None,
1789 })
1790 }
1791
1792 fn broadcast_language_server_update(
1793 &self,
1794 language_server_id: usize,
1795 event: proto::update_language_server::Variant,
1796 ) {
1797 if let Some(project_id) = self.remote_id() {
1798 self.client
1799 .send(proto::UpdateLanguageServer {
1800 project_id,
1801 language_server_id: language_server_id as u64,
1802 variant: Some(event),
1803 })
1804 .log_err();
1805 }
1806 }
1807
1808 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1809 for (_, server) in self.language_servers.values() {
1810 server
1811 .notify::<lsp::notification::DidChangeConfiguration>(
1812 lsp::DidChangeConfigurationParams {
1813 settings: settings.clone(),
1814 },
1815 )
1816 .ok();
1817 }
1818 *self.language_server_settings.lock() = settings;
1819 }
1820
1821 pub fn language_server_statuses(
1822 &self,
1823 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1824 self.language_server_statuses.values()
1825 }
1826
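    /// Converts an LSP `textDocument/publishDiagnostics` payload into diagnostic
    /// entries, grouping each piece of related information with its primary diagnostic.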
1827 pub fn update_diagnostics(
1828 &mut self,
1829 params: lsp::PublishDiagnosticsParams,
1830 disk_based_sources: &[&str],
1831 cx: &mut ModelContext<Self>,
1832 ) -> Result<()> {
1833 let abs_path = params
1834 .uri
1835 .to_file_path()
1836 .map_err(|_| anyhow!("URI is not a file"))?;
1837 let mut next_group_id = 0;
1838 let mut diagnostics = Vec::default();
1839 let mut primary_diagnostic_group_ids = HashMap::default();
1840 let mut sources_by_group_id = HashMap::default();
1841 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1843 let source = diagnostic.source.as_ref();
1844 let code = diagnostic.code.as_ref().map(|code| match code {
1845 lsp::NumberOrString::Number(code) => code.to_string(),
1846 lsp::NumberOrString::String(code) => code.clone(),
1847 });
1848 let range = range_from_lsp(diagnostic.range);
1849 let is_supporting = diagnostic
1850 .related_information
1851 .as_ref()
1852 .map_or(false, |infos| {
1853 infos.iter().any(|info| {
1854 primary_diagnostic_group_ids.contains_key(&(
1855 source,
1856 code.clone(),
1857 range_from_lsp(info.location.range),
1858 ))
1859 })
1860 });
1861
1862 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1863 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1864 });
1865
1866 if is_supporting {
1867 supporting_diagnostics.insert(
1868 (source, code.clone(), range),
1869 (diagnostic.severity, is_unnecessary),
1870 );
1871 } else {
1872 let group_id = post_inc(&mut next_group_id);
1873 let is_disk_based = source.map_or(false, |source| {
1874 disk_based_sources.contains(&source.as_str())
1875 });
1876
1877 sources_by_group_id.insert(group_id, source);
1878 primary_diagnostic_group_ids
1879 .insert((source, code.clone(), range.clone()), group_id);
1880
1881 diagnostics.push(DiagnosticEntry {
1882 range,
1883 diagnostic: Diagnostic {
1884 code: code.clone(),
1885 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1886 message: diagnostic.message.clone(),
1887 group_id,
1888 is_primary: true,
1889 is_valid: true,
1890 is_disk_based,
1891 is_unnecessary,
1892 },
1893 });
1894 if let Some(infos) = &diagnostic.related_information {
1895 for info in infos {
1896 if info.location.uri == params.uri && !info.message.is_empty() {
1897 let range = range_from_lsp(info.location.range);
1898 diagnostics.push(DiagnosticEntry {
1899 range,
1900 diagnostic: Diagnostic {
1901 code: code.clone(),
1902 severity: DiagnosticSeverity::INFORMATION,
1903 message: info.message.clone(),
1904 group_id,
1905 is_primary: false,
1906 is_valid: true,
1907 is_disk_based,
1908 is_unnecessary: false,
1909 },
1910 });
1911 }
1912 }
1913 }
1914 }
1915 }
1916
1917 for entry in &mut diagnostics {
1918 let diagnostic = &mut entry.diagnostic;
1919 if !diagnostic.is_primary {
1920 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1921 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1922 source,
1923 diagnostic.code.clone(),
1924 entry.range.clone(),
1925 )) {
1926 if let Some(severity) = severity {
1927 diagnostic.severity = severity;
1928 }
1929 diagnostic.is_unnecessary = is_unnecessary;
1930 }
1931 }
1932 }
1933
1934 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1935 Ok(())
1936 }
1937
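    /// Records a new set of diagnostics for the file at `abs_path`: the matching open
    /// buffer (if any) is updated, the entries are stored on the owning local worktree,
    /// and a `DiagnosticsUpdated` event is emitted. Files in non-visible worktrees are
    /// ignored.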
1938 pub fn update_diagnostic_entries(
1939 &mut self,
1940 abs_path: PathBuf,
1941 version: Option<i32>,
1942 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1943 cx: &mut ModelContext<Project>,
1944 ) -> Result<(), anyhow::Error> {
1945 let (worktree, relative_path) = self
1946 .find_local_worktree(&abs_path, cx)
1947 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1948 if !worktree.read(cx).is_visible() {
1949 return Ok(());
1950 }
1951
1952 let project_path = ProjectPath {
1953 worktree_id: worktree.read(cx).id(),
1954 path: relative_path.into(),
1955 };
1956
1957 for buffer in self.opened_buffers.values() {
1958 if let Some(buffer) = buffer.upgrade(cx) {
1959 if buffer
1960 .read(cx)
1961 .file()
1962 .map_or(false, |file| *file.path() == project_path.path)
1963 {
1964 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1965 break;
1966 }
1967 }
1968 }
1969 worktree.update(cx, |worktree, cx| {
1970 worktree
1971 .as_local_mut()
1972 .ok_or_else(|| anyhow!("not a local worktree"))?
1973 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1974 })?;
1975 cx.emit(Event::DiagnosticsUpdated(project_path));
1976 Ok(())
1977 }
1978
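    /// Applies a set of diagnostics to an open buffer: entries are sorted, disk-based
    /// diagnostics are translated through the edits made since the last save, all ranges
    /// are clipped to valid positions (empty ranges are widened by one character), and the
    /// resulting `DiagnosticSet` replaces the buffer's previous diagnostics.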
1979 fn update_buffer_diagnostics(
1980 &mut self,
1981 buffer: &ModelHandle<Buffer>,
1982 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1983 version: Option<i32>,
1984 cx: &mut ModelContext<Self>,
1985 ) -> Result<()> {
1986 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1987 Ordering::Equal
1988 .then_with(|| b.is_primary.cmp(&a.is_primary))
1989 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1990 .then_with(|| a.severity.cmp(&b.severity))
1991 .then_with(|| a.message.cmp(&b.message))
1992 }
1993
1994 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1995
1996 diagnostics.sort_unstable_by(|a, b| {
1997 Ordering::Equal
1998 .then_with(|| a.range.start.cmp(&b.range.start))
1999 .then_with(|| b.range.end.cmp(&a.range.end))
2000 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2001 });
2002
2003 let mut sanitized_diagnostics = Vec::new();
2004 let edits_since_save = Patch::new(
2005 snapshot
2006 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2007 .collect(),
2008 );
2009 for entry in diagnostics {
2010 let start;
2011 let end;
2012 if entry.diagnostic.is_disk_based {
2013 // Some diagnostics are based on files on disk instead of buffers'
2014 // current contents. Adjust these diagnostics' ranges to reflect
2015 // any unsaved edits.
2016 start = edits_since_save.old_to_new(entry.range.start);
2017 end = edits_since_save.old_to_new(entry.range.end);
2018 } else {
2019 start = entry.range.start;
2020 end = entry.range.end;
2021 }
2022
2023 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2024 ..snapshot.clip_point_utf16(end, Bias::Right);
2025
2026 // Expand empty ranges by one character
2027 if range.start == range.end {
2028 range.end.column += 1;
2029 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2030 if range.start == range.end && range.end.column > 0 {
2031 range.start.column -= 1;
2032 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2033 }
2034 }
2035
2036 sanitized_diagnostics.push(DiagnosticEntry {
2037 range,
2038 diagnostic: entry.diagnostic,
2039 });
2040 }
2041 drop(edits_since_save);
2042
2043 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2044 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2045 Ok(())
2046 }
2047
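    /// Reloads the given buffers from disk, skipping any that aren't dirty. Remote buffers
    /// are reloaded by the host via RPC; local buffers are reloaded directly. The resulting
    /// transactions are kept out of each buffer's undo history unless `push_to_history` is
    /// true.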
2048 pub fn reload_buffers(
2049 &self,
2050 buffers: HashSet<ModelHandle<Buffer>>,
2051 push_to_history: bool,
2052 cx: &mut ModelContext<Self>,
2053 ) -> Task<Result<ProjectTransaction>> {
2054 let mut local_buffers = Vec::new();
2055 let mut remote_buffers = None;
2056 for buffer_handle in buffers {
2057 let buffer = buffer_handle.read(cx);
2058 if buffer.is_dirty() {
2059 if let Some(file) = File::from_dyn(buffer.file()) {
2060 if file.is_local() {
2061 local_buffers.push(buffer_handle);
2062 } else {
2063 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2064 }
2065 }
2066 }
2067 }
2068
2069 let remote_buffers = self.remote_id().zip(remote_buffers);
2070 let client = self.client.clone();
2071
2072 cx.spawn(|this, mut cx| async move {
2073 let mut project_transaction = ProjectTransaction::default();
2074
2075 if let Some((project_id, remote_buffers)) = remote_buffers {
2076 let response = client
2077 .request(proto::ReloadBuffers {
2078 project_id,
2079 buffer_ids: remote_buffers
2080 .iter()
2081 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2082 .collect(),
2083 })
2084 .await?
2085 .transaction
2086 .ok_or_else(|| anyhow!("missing transaction"))?;
2087 project_transaction = this
2088 .update(&mut cx, |this, cx| {
2089 this.deserialize_project_transaction(response, push_to_history, cx)
2090 })
2091 .await?;
2092 }
2093
2094 for buffer in local_buffers {
2095 let transaction = buffer
2096 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2097 .await?;
2098 buffer.update(&mut cx, |buffer, cx| {
2099 if let Some(transaction) = transaction {
2100 if !push_to_history {
2101 buffer.forget_transaction(transaction.id);
2102 }
2103 project_transaction.0.insert(cx.handle(), transaction);
2104 }
2105 });
2106 }
2107
2108 Ok(project_transaction)
2109 })
2110 }
2111
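    /// Formats the given buffers. Local buffers are formatted by their language server,
    /// preferring whole-document formatting and falling back to range formatting over the
    /// entire buffer; remote buffers are formatted by the host via RPC. The edits for each
    /// buffer are applied as a single transaction.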
2112 pub fn format(
2113 &self,
2114 buffers: HashSet<ModelHandle<Buffer>>,
2115 push_to_history: bool,
2116 cx: &mut ModelContext<Project>,
2117 ) -> Task<Result<ProjectTransaction>> {
2118 let mut local_buffers = Vec::new();
2119 let mut remote_buffers = None;
2120 for buffer_handle in buffers {
2121 let buffer = buffer_handle.read(cx);
2122 if let Some(file) = File::from_dyn(buffer.file()) {
2123 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2124 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2125 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2126 }
2127 } else {
2128 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2129 }
2130 } else {
2131 return Task::ready(Ok(Default::default()));
2132 }
2133 }
2134
2135 let remote_buffers = self.remote_id().zip(remote_buffers);
2136 let client = self.client.clone();
2137
2138 cx.spawn(|this, mut cx| async move {
2139 let mut project_transaction = ProjectTransaction::default();
2140
2141 if let Some((project_id, remote_buffers)) = remote_buffers {
2142 let response = client
2143 .request(proto::FormatBuffers {
2144 project_id,
2145 buffer_ids: remote_buffers
2146 .iter()
2147 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2148 .collect(),
2149 })
2150 .await?
2151 .transaction
2152 .ok_or_else(|| anyhow!("missing transaction"))?;
2153 project_transaction = this
2154 .update(&mut cx, |this, cx| {
2155 this.deserialize_project_transaction(response, push_to_history, cx)
2156 })
2157 .await?;
2158 }
2159
2160 for (buffer, buffer_abs_path, language_server) in local_buffers {
2161 let text_document = lsp::TextDocumentIdentifier::new(
2162 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2163 );
2164 let capabilities = &language_server.capabilities();
2165 let tab_size = cx.update(|cx| {
2166 let language_name = buffer.read(cx).language().map(|language| language.name());
2167 cx.global::<Settings>().tab_size(language_name.as_deref())
2168 });
2169 let lsp_edits = if capabilities
2170 .document_formatting_provider
2171 .as_ref()
2172 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2173 {
2174 language_server
2175 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2176 text_document,
2177 options: lsp::FormattingOptions {
2178 tab_size,
2179 insert_spaces: true,
2180 insert_final_newline: Some(true),
2181 ..Default::default()
2182 },
2183 work_done_progress_params: Default::default(),
2184 })
2185 .await?
2186 } else if capabilities
2187 .document_range_formatting_provider
2188 .as_ref()
2189 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2190 {
2191 let buffer_start = lsp::Position::new(0, 0);
2192 let buffer_end =
2193 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2194 language_server
2195 .request::<lsp::request::RangeFormatting>(
2196 lsp::DocumentRangeFormattingParams {
2197 text_document,
2198 range: lsp::Range::new(buffer_start, buffer_end),
2199 options: lsp::FormattingOptions {
2200                                    tab_size,
2201 insert_spaces: true,
2202 insert_final_newline: Some(true),
2203 ..Default::default()
2204 },
2205 work_done_progress_params: Default::default(),
2206 },
2207 )
2208 .await?
2209 } else {
2210 continue;
2211 };
2212
2213 if let Some(lsp_edits) = lsp_edits {
2214 let edits = this
2215 .update(&mut cx, |this, cx| {
2216 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2217 })
2218 .await?;
2219 buffer.update(&mut cx, |buffer, cx| {
2220 buffer.finalize_last_transaction();
2221 buffer.start_transaction();
2222 for (range, text) in edits {
2223 buffer.edit([range], text, cx);
2224 }
2225 if buffer.end_transaction(cx).is_some() {
2226 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2227 if !push_to_history {
2228 buffer.forget_transaction(transaction.id);
2229 }
2230 project_transaction.0.insert(cx.handle(), transaction);
2231 }
2232 });
2233 }
2234 }
2235
2236 Ok(project_transaction)
2237 })
2238 }
2239
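    /// Returns the definition locations for the symbol at `position`, as reported by the
    /// buffer's language server (or by the host, when the project is remote).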
2240 pub fn definition<T: ToPointUtf16>(
2241 &self,
2242 buffer: &ModelHandle<Buffer>,
2243 position: T,
2244 cx: &mut ModelContext<Self>,
2245 ) -> Task<Result<Vec<Location>>> {
2246 let position = position.to_point_utf16(buffer.read(cx));
2247 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2248 }
2249
2250 pub fn references<T: ToPointUtf16>(
2251 &self,
2252 buffer: &ModelHandle<Buffer>,
2253 position: T,
2254 cx: &mut ModelContext<Self>,
2255 ) -> Task<Result<Vec<Location>>> {
2256 let position = position.to_point_utf16(buffer.read(cx));
2257 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2258 }
2259
2260 pub fn document_highlights<T: ToPointUtf16>(
2261 &self,
2262 buffer: &ModelHandle<Buffer>,
2263 position: T,
2264 cx: &mut ModelContext<Self>,
2265 ) -> Task<Result<Vec<DocumentHighlight>>> {
2266 let position = position.to_point_utf16(buffer.read(cx));
2267
2268 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2269 }
2270
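    /// Queries every running language server for workspace symbols matching `query`.
    /// Symbol locations are mapped back to project paths (falling back to paths relative
    /// to the originating worktree for files outside any worktree), and labels are derived
    /// from the symbol's language when one can be selected for its path. Remote projects
    /// forward the query to the host.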
2271 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2272 if self.is_local() {
2273 let mut requests = Vec::new();
2274 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2275 let worktree_id = *worktree_id;
2276 if let Some(worktree) = self
2277 .worktree_for_id(worktree_id, cx)
2278 .and_then(|worktree| worktree.read(cx).as_local())
2279 {
2280 let lsp_adapter = lsp_adapter.clone();
2281 let worktree_abs_path = worktree.abs_path().clone();
2282 requests.push(
2283 language_server
2284 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2285 query: query.to_string(),
2286 ..Default::default()
2287 })
2288 .log_err()
2289 .map(move |response| {
2290 (
2291 lsp_adapter,
2292 worktree_id,
2293 worktree_abs_path,
2294 response.unwrap_or_default(),
2295 )
2296 }),
2297 );
2298 }
2299 }
2300
2301 cx.spawn_weak(|this, cx| async move {
2302 let responses = futures::future::join_all(requests).await;
2303 let this = if let Some(this) = this.upgrade(&cx) {
2304 this
2305 } else {
2306 return Ok(Default::default());
2307 };
2308 this.read_with(&cx, |this, cx| {
2309 let mut symbols = Vec::new();
2310 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2311 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2312 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2313 let mut worktree_id = source_worktree_id;
2314 let path;
2315 if let Some((worktree, rel_path)) =
2316 this.find_local_worktree(&abs_path, cx)
2317 {
2318 worktree_id = worktree.read(cx).id();
2319 path = rel_path;
2320 } else {
2321 path = relativize_path(&worktree_abs_path, &abs_path);
2322 }
2323
2324 let label = this
2325 .languages
2326 .select_language(&path)
2327 .and_then(|language| {
2328 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2329 })
2330 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2331 let signature = this.symbol_signature(worktree_id, &path);
2332
2333 Some(Symbol {
2334 source_worktree_id,
2335 worktree_id,
2336 language_server_name: adapter.name(),
2337 name: lsp_symbol.name,
2338 kind: lsp_symbol.kind,
2339 label,
2340 path,
2341 range: range_from_lsp(lsp_symbol.location.range),
2342 signature,
2343 })
2344 }));
2345 }
2346 Ok(symbols)
2347 })
2348 })
2349 } else if let Some(project_id) = self.remote_id() {
2350 let request = self.client.request(proto::GetProjectSymbols {
2351 project_id,
2352 query: query.to_string(),
2353 });
2354 cx.spawn_weak(|this, cx| async move {
2355 let response = request.await?;
2356 let mut symbols = Vec::new();
2357 if let Some(this) = this.upgrade(&cx) {
2358 this.read_with(&cx, |this, _| {
2359 symbols.extend(
2360 response
2361 .symbols
2362 .into_iter()
2363 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2364 );
2365 })
2366 }
2367 Ok(symbols)
2368 })
2369 } else {
2370 Task::ready(Ok(Default::default()))
2371 }
2372 }
2373
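    /// Opens the buffer containing the given symbol, resolving the symbol's path within
    /// its worktree and opening it through the language server that reported it, or asking
    /// the host for the buffer when the project is remote.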
2374 pub fn open_buffer_for_symbol(
2375 &mut self,
2376 symbol: &Symbol,
2377 cx: &mut ModelContext<Self>,
2378 ) -> Task<Result<ModelHandle<Buffer>>> {
2379 if self.is_local() {
2380 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2381 symbol.source_worktree_id,
2382 symbol.language_server_name.clone(),
2383 )) {
2384 server.clone()
2385 } else {
2386 return Task::ready(Err(anyhow!(
2387 "language server for worktree and language not found"
2388 )));
2389 };
2390
2391 let worktree_abs_path = if let Some(worktree_abs_path) = self
2392 .worktree_for_id(symbol.worktree_id, cx)
2393 .and_then(|worktree| worktree.read(cx).as_local())
2394 .map(|local_worktree| local_worktree.abs_path())
2395 {
2396 worktree_abs_path
2397 } else {
2398 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2399 };
2400 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2401 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2402 uri
2403 } else {
2404 return Task::ready(Err(anyhow!("invalid symbol path")));
2405 };
2406
2407 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2408 } else if let Some(project_id) = self.remote_id() {
2409 let request = self.client.request(proto::OpenBufferForSymbol {
2410 project_id,
2411 symbol: Some(serialize_symbol(symbol)),
2412 });
2413 cx.spawn(|this, mut cx| async move {
2414 let response = request.await?;
2415 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2416 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2417 .await
2418 })
2419 } else {
2420 Task::ready(Err(anyhow!("project does not have a remote id")))
2421 }
2422 }
2423
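    /// Requests completions at `position`. For local buffers, this sends a
    /// `textDocument/completion` request to the buffer's language server and converts each
    /// item's text edit into an anchored range, discarding items whose edits fall outside
    /// the buffer; remote projects forward the request to the host.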
2424 pub fn completions<T: ToPointUtf16>(
2425 &self,
2426 source_buffer_handle: &ModelHandle<Buffer>,
2427 position: T,
2428 cx: &mut ModelContext<Self>,
2429 ) -> Task<Result<Vec<Completion>>> {
2430 let source_buffer_handle = source_buffer_handle.clone();
2431 let source_buffer = source_buffer_handle.read(cx);
2432 let buffer_id = source_buffer.remote_id();
2433 let language = source_buffer.language().cloned();
2434 let worktree;
2435 let buffer_abs_path;
2436 if let Some(file) = File::from_dyn(source_buffer.file()) {
2437 worktree = file.worktree.clone();
2438 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2439 } else {
2440 return Task::ready(Ok(Default::default()));
2441 };
2442
2443 let position = position.to_point_utf16(source_buffer);
2444 let anchor = source_buffer.anchor_after(position);
2445
2446 if worktree.read(cx).as_local().is_some() {
2447 let buffer_abs_path = buffer_abs_path.unwrap();
2448 let (_, lang_server) =
2449 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2450 server.clone()
2451 } else {
2452 return Task::ready(Ok(Default::default()));
2453 };
2454
2455 cx.spawn(|_, cx| async move {
2456 let completions = lang_server
2457 .request::<lsp::request::Completion>(lsp::CompletionParams {
2458 text_document_position: lsp::TextDocumentPositionParams::new(
2459 lsp::TextDocumentIdentifier::new(
2460 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2461 ),
2462 point_to_lsp(position),
2463 ),
2464 context: Default::default(),
2465 work_done_progress_params: Default::default(),
2466 partial_result_params: Default::default(),
2467 })
2468 .await
2469 .context("lsp completion request failed")?;
2470
2471 let completions = if let Some(completions) = completions {
2472 match completions {
2473 lsp::CompletionResponse::Array(completions) => completions,
2474 lsp::CompletionResponse::List(list) => list.items,
2475 }
2476 } else {
2477 Default::default()
2478 };
2479
2480 source_buffer_handle.read_with(&cx, |this, _| {
2481 Ok(completions
2482 .into_iter()
2483 .filter_map(|lsp_completion| {
2484 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2485 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2486 (range_from_lsp(edit.range), edit.new_text.clone())
2487 }
2488 None => {
2489 let clipped_position =
2490 this.clip_point_utf16(position, Bias::Left);
2491 if position != clipped_position {
2492 log::info!("completion out of expected range");
2493 return None;
2494 }
2495 (
2496 this.common_prefix_at(
2497 clipped_position,
2498 &lsp_completion.label,
2499 ),
2500 lsp_completion.label.clone(),
2501 )
2502 }
2503 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2504 log::info!("unsupported insert/replace completion");
2505 return None;
2506 }
2507 };
2508
2509 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2510 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2511 if clipped_start == old_range.start && clipped_end == old_range.end {
2512 Some(Completion {
2513 old_range: this.anchor_before(old_range.start)
2514 ..this.anchor_after(old_range.end),
2515 new_text,
2516 label: language
2517 .as_ref()
2518 .and_then(|l| l.label_for_completion(&lsp_completion))
2519 .unwrap_or_else(|| {
2520 CodeLabel::plain(
2521 lsp_completion.label.clone(),
2522 lsp_completion.filter_text.as_deref(),
2523 )
2524 }),
2525 lsp_completion,
2526 })
2527 } else {
2528 log::info!("completion out of expected range");
2529 None
2530 }
2531 })
2532 .collect())
2533 })
2534 })
2535 } else if let Some(project_id) = self.remote_id() {
2536 let rpc = self.client.clone();
2537 let message = proto::GetCompletions {
2538 project_id,
2539 buffer_id,
2540 position: Some(language::proto::serialize_anchor(&anchor)),
2541 version: serialize_version(&source_buffer.version()),
2542 };
2543 cx.spawn_weak(|_, mut cx| async move {
2544 let response = rpc.request(message).await?;
2545
2546 source_buffer_handle
2547 .update(&mut cx, |buffer, _| {
2548 buffer.wait_for_version(deserialize_version(response.version))
2549 })
2550 .await;
2551
2552 response
2553 .completions
2554 .into_iter()
2555 .map(|completion| {
2556 language::proto::deserialize_completion(completion, language.as_ref())
2557 })
2558 .collect()
2559 })
2560 } else {
2561 Task::ready(Ok(Default::default()))
2562 }
2563 }
2564
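    /// Resolves the given completion with the language server and applies any additional
    /// text edits it returns as a single transaction. The transaction is recorded in the
    /// buffer's undo history only when `push_to_history` is true; remote projects delegate
    /// the work to the host.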
2565 pub fn apply_additional_edits_for_completion(
2566 &self,
2567 buffer_handle: ModelHandle<Buffer>,
2568 completion: Completion,
2569 push_to_history: bool,
2570 cx: &mut ModelContext<Self>,
2571 ) -> Task<Result<Option<Transaction>>> {
2572 let buffer = buffer_handle.read(cx);
2573 let buffer_id = buffer.remote_id();
2574
2575 if self.is_local() {
2576 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2577 {
2578 server.clone()
2579 } else {
2580 return Task::ready(Ok(Default::default()));
2581 };
2582
2583 cx.spawn(|this, mut cx| async move {
2584 let resolved_completion = lang_server
2585 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2586 .await?;
2587 if let Some(edits) = resolved_completion.additional_text_edits {
2588 let edits = this
2589 .update(&mut cx, |this, cx| {
2590 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2591 })
2592 .await?;
2593 buffer_handle.update(&mut cx, |buffer, cx| {
2594 buffer.finalize_last_transaction();
2595 buffer.start_transaction();
2596 for (range, text) in edits {
2597 buffer.edit([range], text, cx);
2598 }
2599 let transaction = if buffer.end_transaction(cx).is_some() {
2600 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2601 if !push_to_history {
2602 buffer.forget_transaction(transaction.id);
2603 }
2604 Some(transaction)
2605 } else {
2606 None
2607 };
2608 Ok(transaction)
2609 })
2610 } else {
2611 Ok(None)
2612 }
2613 })
2614 } else if let Some(project_id) = self.remote_id() {
2615 let client = self.client.clone();
2616 cx.spawn(|_, mut cx| async move {
2617 let response = client
2618 .request(proto::ApplyCompletionAdditionalEdits {
2619 project_id,
2620 buffer_id,
2621 completion: Some(language::proto::serialize_completion(&completion)),
2622 })
2623 .await?;
2624
2625 if let Some(transaction) = response.transaction {
2626 let transaction = language::proto::deserialize_transaction(transaction)?;
2627 buffer_handle
2628 .update(&mut cx, |buffer, _| {
2629 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2630 })
2631 .await;
2632 if push_to_history {
2633 buffer_handle.update(&mut cx, |buffer, _| {
2634 buffer.push_transaction(transaction.clone(), Instant::now());
2635 });
2636 }
2637 Ok(Some(transaction))
2638 } else {
2639 Ok(None)
2640 }
2641 })
2642 } else {
2643 Task::ready(Err(anyhow!("project does not have a remote id")))
2644 }
2645 }
2646
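    /// Fetches the code actions available for the given range, forwarding the diagnostics
    /// that overlap it. Only quickfix, refactor, refactor-extract, and source actions are
    /// requested, and bare commands are filtered out of the response. Remote projects ask
    /// the host instead.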
2647 pub fn code_actions<T: Clone + ToOffset>(
2648 &self,
2649 buffer_handle: &ModelHandle<Buffer>,
2650 range: Range<T>,
2651 cx: &mut ModelContext<Self>,
2652 ) -> Task<Result<Vec<CodeAction>>> {
2653 let buffer_handle = buffer_handle.clone();
2654 let buffer = buffer_handle.read(cx);
2655 let snapshot = buffer.snapshot();
2656 let relevant_diagnostics = snapshot
2657 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2658 .map(|entry| entry.to_lsp_diagnostic_stub())
2659 .collect();
2660 let buffer_id = buffer.remote_id();
2661 let worktree;
2662 let buffer_abs_path;
2663 if let Some(file) = File::from_dyn(buffer.file()) {
2664 worktree = file.worktree.clone();
2665 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2666 } else {
2667 return Task::ready(Ok(Default::default()));
2668 };
2669 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2670
2671 if worktree.read(cx).as_local().is_some() {
2672 let buffer_abs_path = buffer_abs_path.unwrap();
2673 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2674 {
2675 server.clone()
2676 } else {
2677 return Task::ready(Ok(Default::default()));
2678 };
2679
2680 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2681 cx.foreground().spawn(async move {
2682                if lang_server.capabilities().code_action_provider.is_none() {
2683 return Ok(Default::default());
2684 }
2685
2686 Ok(lang_server
2687 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2688 text_document: lsp::TextDocumentIdentifier::new(
2689 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2690 ),
2691 range: lsp_range,
2692 work_done_progress_params: Default::default(),
2693 partial_result_params: Default::default(),
2694 context: lsp::CodeActionContext {
2695 diagnostics: relevant_diagnostics,
2696 only: Some(vec![
2697 lsp::CodeActionKind::QUICKFIX,
2698 lsp::CodeActionKind::REFACTOR,
2699 lsp::CodeActionKind::REFACTOR_EXTRACT,
2700 lsp::CodeActionKind::SOURCE,
2701 ]),
2702 },
2703 })
2704 .await?
2705 .unwrap_or_default()
2706 .into_iter()
2707 .filter_map(|entry| {
2708 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2709 Some(CodeAction {
2710 range: range.clone(),
2711 lsp_action,
2712 })
2713 } else {
2714 None
2715 }
2716 })
2717 .collect())
2718 })
2719 } else if let Some(project_id) = self.remote_id() {
2720 let rpc = self.client.clone();
2721 let version = buffer.version();
2722 cx.spawn_weak(|_, mut cx| async move {
2723 let response = rpc
2724 .request(proto::GetCodeActions {
2725 project_id,
2726 buffer_id,
2727 start: Some(language::proto::serialize_anchor(&range.start)),
2728 end: Some(language::proto::serialize_anchor(&range.end)),
2729 version: serialize_version(&version),
2730 })
2731 .await?;
2732
2733 buffer_handle
2734 .update(&mut cx, |buffer, _| {
2735 buffer.wait_for_version(deserialize_version(response.version))
2736 })
2737 .await;
2738
2739 response
2740 .actions
2741 .into_iter()
2742 .map(language::proto::deserialize_code_action)
2743 .collect()
2744 })
2745 } else {
2746 Task::ready(Ok(Default::default()))
2747 }
2748 }
2749
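    /// Applies a code action: the action is resolved (or re-requested when it can't be
    /// resolved in place), then its workspace edit is applied, or its command is executed
    /// and the workspace edits produced by the language server are collected into the
    /// returned project transaction.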
2750 pub fn apply_code_action(
2751 &self,
2752 buffer_handle: ModelHandle<Buffer>,
2753 mut action: CodeAction,
2754 push_to_history: bool,
2755 cx: &mut ModelContext<Self>,
2756 ) -> Task<Result<ProjectTransaction>> {
2757 if self.is_local() {
2758 let buffer = buffer_handle.read(cx);
2759 let (lsp_adapter, lang_server) =
2760 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2761 server.clone()
2762 } else {
2763 return Task::ready(Ok(Default::default()));
2764 };
2765 let range = action.range.to_point_utf16(buffer);
2766
2767 cx.spawn(|this, mut cx| async move {
2768 if let Some(lsp_range) = action
2769 .lsp_action
2770 .data
2771 .as_mut()
2772 .and_then(|d| d.get_mut("codeActionParams"))
2773 .and_then(|d| d.get_mut("range"))
2774 {
2775 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2776 action.lsp_action = lang_server
2777 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2778 .await?;
2779 } else {
2780 let actions = this
2781 .update(&mut cx, |this, cx| {
2782 this.code_actions(&buffer_handle, action.range, cx)
2783 })
2784 .await?;
2785 action.lsp_action = actions
2786 .into_iter()
2787 .find(|a| a.lsp_action.title == action.lsp_action.title)
2788 .ok_or_else(|| anyhow!("code action is outdated"))?
2789 .lsp_action;
2790 }
2791
2792 if let Some(edit) = action.lsp_action.edit {
2793 Self::deserialize_workspace_edit(
2794 this,
2795 edit,
2796 push_to_history,
2797 lsp_adapter,
2798 lang_server,
2799 &mut cx,
2800 )
2801 .await
2802 } else if let Some(command) = action.lsp_action.command {
2803 this.update(&mut cx, |this, _| {
2804 this.last_workspace_edits_by_language_server
2805 .remove(&lang_server.server_id());
2806 });
2807 lang_server
2808 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2809 command: command.command,
2810 arguments: command.arguments.unwrap_or_default(),
2811 ..Default::default()
2812 })
2813 .await?;
2814 Ok(this.update(&mut cx, |this, _| {
2815 this.last_workspace_edits_by_language_server
2816 .remove(&lang_server.server_id())
2817 .unwrap_or_default()
2818 }))
2819 } else {
2820 Ok(ProjectTransaction::default())
2821 }
2822 })
2823 } else if let Some(project_id) = self.remote_id() {
2824 let client = self.client.clone();
2825 let request = proto::ApplyCodeAction {
2826 project_id,
2827 buffer_id: buffer_handle.read(cx).remote_id(),
2828 action: Some(language::proto::serialize_code_action(&action)),
2829 };
2830 cx.spawn(|this, mut cx| async move {
2831 let response = client
2832 .request(request)
2833 .await?
2834 .transaction
2835 .ok_or_else(|| anyhow!("missing transaction"))?;
2836 this.update(&mut cx, |this, cx| {
2837 this.deserialize_project_transaction(response, push_to_history, cx)
2838 })
2839 .await
2840 })
2841 } else {
2842 Task::ready(Err(anyhow!("project does not have a remote id")))
2843 }
2844 }
2845
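    /// Applies an LSP workspace edit locally: resource operations create, rename, or
    /// delete files and directories via the project's `Fs`, while document edits are
    /// applied to the corresponding buffers, yielding one transaction per edited buffer.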
2846 async fn deserialize_workspace_edit(
2847 this: ModelHandle<Self>,
2848 edit: lsp::WorkspaceEdit,
2849 push_to_history: bool,
2850 lsp_adapter: Arc<dyn LspAdapter>,
2851 language_server: Arc<LanguageServer>,
2852 cx: &mut AsyncAppContext,
2853 ) -> Result<ProjectTransaction> {
2854 let fs = this.read_with(cx, |this, _| this.fs.clone());
2855 let mut operations = Vec::new();
2856 if let Some(document_changes) = edit.document_changes {
2857 match document_changes {
2858 lsp::DocumentChanges::Edits(edits) => {
2859 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2860 }
2861 lsp::DocumentChanges::Operations(ops) => operations = ops,
2862 }
2863 } else if let Some(changes) = edit.changes {
2864 operations.extend(changes.into_iter().map(|(uri, edits)| {
2865 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2866 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2867 uri,
2868 version: None,
2869 },
2870 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2871 })
2872 }));
2873 }
2874
2875 let mut project_transaction = ProjectTransaction::default();
2876 for operation in operations {
2877 match operation {
2878 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2879 let abs_path = op
2880 .uri
2881 .to_file_path()
2882 .map_err(|_| anyhow!("can't convert URI to path"))?;
2883
2884 if let Some(parent_path) = abs_path.parent() {
2885 fs.create_dir(parent_path).await?;
2886 }
2887 if abs_path.ends_with("/") {
2888 fs.create_dir(&abs_path).await?;
2889 } else {
2890 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2891 .await?;
2892 }
2893 }
2894 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2895 let source_abs_path = op
2896 .old_uri
2897 .to_file_path()
2898 .map_err(|_| anyhow!("can't convert URI to path"))?;
2899 let target_abs_path = op
2900 .new_uri
2901 .to_file_path()
2902 .map_err(|_| anyhow!("can't convert URI to path"))?;
2903 fs.rename(
2904 &source_abs_path,
2905 &target_abs_path,
2906 op.options.map(Into::into).unwrap_or_default(),
2907 )
2908 .await?;
2909 }
2910 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2911 let abs_path = op
2912 .uri
2913 .to_file_path()
2914 .map_err(|_| anyhow!("can't convert URI to path"))?;
2915 let options = op.options.map(Into::into).unwrap_or_default();
2916 if abs_path.ends_with("/") {
2917 fs.remove_dir(&abs_path, options).await?;
2918 } else {
2919 fs.remove_file(&abs_path, options).await?;
2920 }
2921 }
2922 lsp::DocumentChangeOperation::Edit(op) => {
2923 let buffer_to_edit = this
2924 .update(cx, |this, cx| {
2925 this.open_local_buffer_via_lsp(
2926 op.text_document.uri,
2927 lsp_adapter.clone(),
2928 language_server.clone(),
2929 cx,
2930 )
2931 })
2932 .await?;
2933
2934 let edits = this
2935 .update(cx, |this, cx| {
2936 let edits = op.edits.into_iter().map(|edit| match edit {
2937 lsp::OneOf::Left(edit) => edit,
2938 lsp::OneOf::Right(edit) => edit.text_edit,
2939 });
2940 this.edits_from_lsp(
2941 &buffer_to_edit,
2942 edits,
2943 op.text_document.version,
2944 cx,
2945 )
2946 })
2947 .await?;
2948
2949 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2950 buffer.finalize_last_transaction();
2951 buffer.start_transaction();
2952 for (range, text) in edits {
2953 buffer.edit([range], text, cx);
2954 }
2955 let transaction = if buffer.end_transaction(cx).is_some() {
2956 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2957 if !push_to_history {
2958 buffer.forget_transaction(transaction.id);
2959 }
2960 Some(transaction)
2961 } else {
2962 None
2963 };
2964
2965 transaction
2966 });
2967 if let Some(transaction) = transaction {
2968 project_transaction.0.insert(buffer_to_edit, transaction);
2969 }
2970 }
2971 }
2972 }
2973
2974 Ok(project_transaction)
2975 }
2976
2977 pub fn prepare_rename<T: ToPointUtf16>(
2978 &self,
2979 buffer: ModelHandle<Buffer>,
2980 position: T,
2981 cx: &mut ModelContext<Self>,
2982 ) -> Task<Result<Option<Range<Anchor>>>> {
2983 let position = position.to_point_utf16(buffer.read(cx));
2984 self.request_lsp(buffer, PrepareRename { position }, cx)
2985 }
2986
2987 pub fn perform_rename<T: ToPointUtf16>(
2988 &self,
2989 buffer: ModelHandle<Buffer>,
2990 position: T,
2991 new_name: String,
2992 push_to_history: bool,
2993 cx: &mut ModelContext<Self>,
2994 ) -> Task<Result<ProjectTransaction>> {
2995 let position = position.to_point_utf16(buffer.read(cx));
2996 self.request_lsp(
2997 buffer,
2998 PerformRename {
2999 position,
3000 new_name,
3001 push_to_history,
3002 },
3003 cx,
3004 )
3005 }
3006
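    /// Searches the project for `query`. Locally, a pool of background workers scans the
    /// visible files of every worktree for candidate paths, candidate buffers are opened,
    /// and matches are collected as anchor ranges per buffer. Remotely, the search runs on
    /// the host and the returned locations are deserialized into local buffer handles.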
3007 pub fn search(
3008 &self,
3009 query: SearchQuery,
3010 cx: &mut ModelContext<Self>,
3011 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3012 if self.is_local() {
3013 let snapshots = self
3014 .visible_worktrees(cx)
3015 .filter_map(|tree| {
3016 let tree = tree.read(cx).as_local()?;
3017 Some(tree.snapshot())
3018 })
3019 .collect::<Vec<_>>();
3020
3021 let background = cx.background().clone();
3022 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3023 if path_count == 0 {
3024 return Task::ready(Ok(Default::default()));
3025 }
3026 let workers = background.num_cpus().min(path_count);
3027 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3028 cx.background()
3029 .spawn({
3030 let fs = self.fs.clone();
3031 let background = cx.background().clone();
3032 let query = query.clone();
3033 async move {
3034 let fs = &fs;
3035 let query = &query;
3036 let matching_paths_tx = &matching_paths_tx;
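                        // Divide the visible files into contiguous, equally sized chunks
                        // so each worker scans its own slice of the worktree snapshots.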
3037 let paths_per_worker = (path_count + workers - 1) / workers;
3038 let snapshots = &snapshots;
3039 background
3040 .scoped(|scope| {
3041 for worker_ix in 0..workers {
3042 let worker_start_ix = worker_ix * paths_per_worker;
3043 let worker_end_ix = worker_start_ix + paths_per_worker;
3044 scope.spawn(async move {
3045 let mut snapshot_start_ix = 0;
3046 let mut abs_path = PathBuf::new();
3047 for snapshot in snapshots {
3048 let snapshot_end_ix =
3049 snapshot_start_ix + snapshot.visible_file_count();
3050 if worker_end_ix <= snapshot_start_ix {
3051 break;
3052 } else if worker_start_ix > snapshot_end_ix {
3053 snapshot_start_ix = snapshot_end_ix;
3054 continue;
3055 } else {
3056 let start_in_snapshot = worker_start_ix
3057 .saturating_sub(snapshot_start_ix);
3058 let end_in_snapshot =
3059 cmp::min(worker_end_ix, snapshot_end_ix)
3060 - snapshot_start_ix;
3061
3062 for entry in snapshot
3063 .files(false, start_in_snapshot)
3064 .take(end_in_snapshot - start_in_snapshot)
3065 {
3066 if matching_paths_tx.is_closed() {
3067 break;
3068 }
3069
3070 abs_path.clear();
3071 abs_path.push(&snapshot.abs_path());
3072 abs_path.push(&entry.path);
3073 let matches = if let Some(file) =
3074 fs.open_sync(&abs_path).await.log_err()
3075 {
3076 query.detect(file).unwrap_or(false)
3077 } else {
3078 false
3079 };
3080
3081 if matches {
3082 let project_path =
3083 (snapshot.id(), entry.path.clone());
3084 if matching_paths_tx
3085 .send(project_path)
3086 .await
3087 .is_err()
3088 {
3089 break;
3090 }
3091 }
3092 }
3093
3094 snapshot_start_ix = snapshot_end_ix;
3095 }
3096 }
3097 });
3098 }
3099 })
3100 .await;
3101 }
3102 })
3103 .detach();
3104
3105 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3106 let open_buffers = self
3107 .opened_buffers
3108 .values()
3109 .filter_map(|b| b.upgrade(cx))
3110 .collect::<HashSet<_>>();
3111 cx.spawn(|this, cx| async move {
3112 for buffer in &open_buffers {
3113 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3114 buffers_tx.send((buffer.clone(), snapshot)).await?;
3115 }
3116
3117 let open_buffers = Rc::new(RefCell::new(open_buffers));
3118 while let Some(project_path) = matching_paths_rx.next().await {
3119 if buffers_tx.is_closed() {
3120 break;
3121 }
3122
3123 let this = this.clone();
3124 let open_buffers = open_buffers.clone();
3125 let buffers_tx = buffers_tx.clone();
3126 cx.spawn(|mut cx| async move {
3127 if let Some(buffer) = this
3128 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3129 .await
3130 .log_err()
3131 {
3132 if open_buffers.borrow_mut().insert(buffer.clone()) {
3133 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3134 buffers_tx.send((buffer, snapshot)).await?;
3135 }
3136 }
3137
3138 Ok::<_, anyhow::Error>(())
3139 })
3140 .detach();
3141 }
3142
3143 Ok::<_, anyhow::Error>(())
3144 })
3145 .detach_and_log_err(cx);
3146
3147 let background = cx.background().clone();
3148 cx.background().spawn(async move {
3149 let query = &query;
3150 let mut matched_buffers = Vec::new();
3151 for _ in 0..workers {
3152 matched_buffers.push(HashMap::default());
3153 }
3154 background
3155 .scoped(|scope| {
3156 for worker_matched_buffers in matched_buffers.iter_mut() {
3157 let mut buffers_rx = buffers_rx.clone();
3158 scope.spawn(async move {
3159 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3160 let buffer_matches = query
3161 .search(snapshot.as_rope())
3162 .await
3163 .iter()
3164 .map(|range| {
3165 snapshot.anchor_before(range.start)
3166 ..snapshot.anchor_after(range.end)
3167 })
3168 .collect::<Vec<_>>();
3169 if !buffer_matches.is_empty() {
3170 worker_matched_buffers
3171 .insert(buffer.clone(), buffer_matches);
3172 }
3173 }
3174 });
3175 }
3176 })
3177 .await;
3178 Ok(matched_buffers.into_iter().flatten().collect())
3179 })
3180 } else if let Some(project_id) = self.remote_id() {
3181 let request = self.client.request(query.to_proto(project_id));
3182 cx.spawn(|this, mut cx| async move {
3183 let response = request.await?;
3184 let mut result = HashMap::default();
3185 for location in response.locations {
3186 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3187 let target_buffer = this
3188 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3189 .await?;
3190 let start = location
3191 .start
3192 .and_then(deserialize_anchor)
3193 .ok_or_else(|| anyhow!("missing target start"))?;
3194 let end = location
3195 .end
3196 .and_then(deserialize_anchor)
3197 .ok_or_else(|| anyhow!("missing target end"))?;
3198 result
3199 .entry(target_buffer)
3200 .or_insert(Vec::new())
3201 .push(start..end)
3202 }
3203 Ok(result)
3204 })
3205 } else {
3206 Task::ready(Ok(Default::default()))
3207 }
3208 }
3209
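    /// Issues a typed LSP request for the given buffer: local projects send it to the
    /// buffer's language server (after checking its capabilities), remote projects send
    /// the equivalent RPC message to the host, and a default response is returned when
    /// neither applies.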
3210 fn request_lsp<R: LspCommand>(
3211 &self,
3212 buffer_handle: ModelHandle<Buffer>,
3213 request: R,
3214 cx: &mut ModelContext<Self>,
3215 ) -> Task<Result<R::Response>>
3216 where
3217 <R::LspRequest as lsp::request::Request>::Result: Send,
3218 {
3219 let buffer = buffer_handle.read(cx);
3220 if self.is_local() {
3221 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3222 if let Some((file, (_, language_server))) =
3223 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3224 {
3225 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3226 return cx.spawn(|this, cx| async move {
3227 if !request.check_capabilities(&language_server.capabilities()) {
3228 return Ok(Default::default());
3229 }
3230
3231 let response = language_server
3232 .request::<R::LspRequest>(lsp_params)
3233 .await
3234 .context("lsp request failed")?;
3235 request
3236 .response_from_lsp(response, this, buffer_handle, cx)
3237 .await
3238 });
3239 }
3240 } else if let Some(project_id) = self.remote_id() {
3241 let rpc = self.client.clone();
3242 let message = request.to_proto(project_id, buffer);
3243 return cx.spawn(|this, cx| async move {
3244 let response = rpc.request(message).await?;
3245 request
3246 .response_from_proto(response, this, buffer_handle, cx)
3247 .await
3248 });
3249 }
3250 Task::ready(Ok(Default::default()))
3251 }
3252
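    /// Returns the local worktree containing `abs_path` together with the path relative to
    /// that worktree, creating a new worktree (and registering or sharing it with the
    /// remote project, if any) when no existing worktree contains the path.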
3253 pub fn find_or_create_local_worktree(
3254 &mut self,
3255 abs_path: impl AsRef<Path>,
3256 visible: bool,
3257 cx: &mut ModelContext<Self>,
3258 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3259 let abs_path = abs_path.as_ref();
3260 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3261 Task::ready(Ok((tree.clone(), relative_path.into())))
3262 } else {
3263 let worktree = self.create_local_worktree(abs_path, visible, cx);
3264 cx.foreground()
3265 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3266 }
3267 }
3268
3269 pub fn find_local_worktree(
3270 &self,
3271 abs_path: &Path,
3272 cx: &AppContext,
3273 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3274 for tree in self.worktrees(cx) {
3275 if let Some(relative_path) = tree
3276 .read(cx)
3277 .as_local()
3278 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3279 {
3280 return Some((tree.clone(), relative_path.into()));
3281 }
3282 }
3283 None
3284 }
3285
3286 pub fn is_shared(&self) -> bool {
3287 match &self.client_state {
3288 ProjectClientState::Local { is_shared, .. } => *is_shared,
3289 ProjectClientState::Remote { .. } => false,
3290 }
3291 }
3292
3293 fn create_local_worktree(
3294 &mut self,
3295 abs_path: impl AsRef<Path>,
3296 visible: bool,
3297 cx: &mut ModelContext<Self>,
3298 ) -> Task<Result<ModelHandle<Worktree>>> {
3299 let fs = self.fs.clone();
3300 let client = self.client.clone();
3301 let next_entry_id = self.next_entry_id.clone();
3302 let path: Arc<Path> = abs_path.as_ref().into();
3303 let task = self
3304 .loading_local_worktrees
3305 .entry(path.clone())
3306 .or_insert_with(|| {
3307 cx.spawn(|project, mut cx| {
3308 async move {
3309 let worktree = Worktree::local(
3310 client.clone(),
3311 path.clone(),
3312 visible,
3313 fs,
3314 next_entry_id,
3315 &mut cx,
3316 )
3317 .await;
3318 project.update(&mut cx, |project, _| {
3319 project.loading_local_worktrees.remove(&path);
3320 });
3321 let worktree = worktree?;
3322
3323 let (remote_project_id, is_shared) =
3324 project.update(&mut cx, |project, cx| {
3325 project.add_worktree(&worktree, cx);
3326 (project.remote_id(), project.is_shared())
3327 });
3328
3329 if let Some(project_id) = remote_project_id {
3330 if is_shared {
3331 worktree
3332 .update(&mut cx, |worktree, cx| {
3333 worktree.as_local_mut().unwrap().share(project_id, cx)
3334 })
3335 .await?;
3336 } else {
3337 worktree
3338 .update(&mut cx, |worktree, cx| {
3339 worktree.as_local_mut().unwrap().register(project_id, cx)
3340 })
3341 .await?;
3342 }
3343 }
3344
3345 Ok(worktree)
3346 }
3347                    .map_err(Arc::new)
3348 })
3349 .shared()
3350 })
3351 .clone();
3352 cx.foreground().spawn(async move {
3353 match task.await {
3354 Ok(worktree) => Ok(worktree),
3355 Err(err) => Err(anyhow!("{}", err)),
3356 }
3357 })
3358 }
3359
3360 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3361 self.worktrees.retain(|worktree| {
3362 worktree
3363 .upgrade(cx)
3364 .map_or(false, |w| w.read(cx).id() != id)
3365 });
3366 cx.notify();
3367 }
3368
3369 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3370 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3371 if worktree.read(cx).is_local() {
3372 cx.subscribe(&worktree, |this, worktree, _, cx| {
3373 this.update_local_worktree_buffers(worktree, cx);
3374 })
3375 .detach();
3376 }
3377
3378 let push_strong_handle = {
3379 let worktree = worktree.read(cx);
3380 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3381 };
3382 if push_strong_handle {
3383 self.worktrees
3384 .push(WorktreeHandle::Strong(worktree.clone()));
3385 } else {
3386 cx.observe_release(&worktree, |this, _, cx| {
3387 this.worktrees
3388 .retain(|worktree| worktree.upgrade(cx).is_some());
3389 cx.notify();
3390 })
3391 .detach();
3392 self.worktrees
3393 .push(WorktreeHandle::Weak(worktree.downgrade()));
3394 }
3395 cx.notify();
3396 }
3397
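    /// Reconciles open buffers with a local worktree after its snapshot changes: each
    /// buffer's file is refreshed (matched by entry id, then by path), collaborators are
    /// sent the updated file metadata, dropped buffers are pruned, and renamed buffers are
    /// re-registered with their language servers.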
3398 fn update_local_worktree_buffers(
3399 &mut self,
3400 worktree_handle: ModelHandle<Worktree>,
3401 cx: &mut ModelContext<Self>,
3402 ) {
3403 let snapshot = worktree_handle.read(cx).snapshot();
3404 let mut buffers_to_delete = Vec::new();
3405 let mut renamed_buffers = Vec::new();
3406 for (buffer_id, buffer) in &self.opened_buffers {
3407 if let Some(buffer) = buffer.upgrade(cx) {
3408 buffer.update(cx, |buffer, cx| {
3409 if let Some(old_file) = File::from_dyn(buffer.file()) {
3410 if old_file.worktree != worktree_handle {
3411 return;
3412 }
3413
3414 let new_file = if let Some(entry) = old_file
3415 .entry_id
3416 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3417 {
3418 File {
3419 is_local: true,
3420 entry_id: Some(entry.id),
3421 mtime: entry.mtime,
3422 path: entry.path.clone(),
3423 worktree: worktree_handle.clone(),
3424 }
3425 } else if let Some(entry) =
3426 snapshot.entry_for_path(old_file.path().as_ref())
3427 {
3428 File {
3429 is_local: true,
3430 entry_id: Some(entry.id),
3431 mtime: entry.mtime,
3432 path: entry.path.clone(),
3433 worktree: worktree_handle.clone(),
3434 }
3435 } else {
3436 File {
3437 is_local: true,
3438 entry_id: None,
3439 path: old_file.path().clone(),
3440 mtime: old_file.mtime(),
3441 worktree: worktree_handle.clone(),
3442 }
3443 };
3444
3445 let old_path = old_file.abs_path(cx);
3446 if new_file.abs_path(cx) != old_path {
3447 renamed_buffers.push((cx.handle(), old_path));
3448 }
3449
3450 if let Some(project_id) = self.remote_id() {
3451 self.client
3452 .send(proto::UpdateBufferFile {
3453 project_id,
3454 buffer_id: *buffer_id as u64,
3455 file: Some(new_file.to_proto()),
3456 })
3457 .log_err();
3458 }
3459 buffer.file_updated(Box::new(new_file), cx).detach();
3460 }
3461 });
3462 } else {
3463 buffers_to_delete.push(*buffer_id);
3464 }
3465 }
3466
3467 for buffer_id in buffers_to_delete {
3468 self.opened_buffers.remove(&buffer_id);
3469 }
3470
3471 for (buffer, old_path) in renamed_buffers {
3472 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3473 self.assign_language_to_buffer(&buffer, cx);
3474 self.register_buffer_with_language_server(&buffer, cx);
3475 }
3476 }
3477
3478 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3479 let new_active_entry = entry.and_then(|project_path| {
3480 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3481 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3482 Some(entry.id)
3483 });
3484 if new_active_entry != self.active_entry {
3485 self.active_entry = new_active_entry;
3486 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3487 }
3488 }
3489
3490 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3491 self.language_server_statuses
3492 .values()
3493 .any(|status| status.pending_diagnostic_updates > 0)
3494 }
3495
3496 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3497 let mut summary = DiagnosticSummary::default();
3498 for (_, path_summary) in self.diagnostic_summaries(cx) {
3499 summary.error_count += path_summary.error_count;
3500 summary.warning_count += path_summary.warning_count;
3501 summary.info_count += path_summary.info_count;
3502 summary.hint_count += path_summary.hint_count;
3503 }
3504 summary
3505 }
3506
3507 pub fn diagnostic_summaries<'a>(
3508 &'a self,
3509 cx: &'a AppContext,
3510 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3511 self.worktrees(cx).flat_map(move |worktree| {
3512 let worktree = worktree.read(cx);
3513 let worktree_id = worktree.id();
3514 worktree
3515 .diagnostic_summaries()
3516 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3517 })
3518 }
3519
3520 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
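        // Emit the started event only while exactly one disk-based update is pending
        // across all language servers.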
3521 if self
3522 .language_server_statuses
3523 .values()
3524 .map(|status| status.pending_diagnostic_updates)
3525 .sum::<isize>()
3526 == 1
3527 {
3528 cx.emit(Event::DiskBasedDiagnosticsStarted);
3529 }
3530 }
3531
3532 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
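        // Always announce that disk-based diagnostics were updated, but emit the finished
        // event only once no server has a pending disk-based update.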
3533 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3534 if self
3535 .language_server_statuses
3536 .values()
3537 .map(|status| status.pending_diagnostic_updates)
3538 .sum::<isize>()
3539 == 0
3540 {
3541 cx.emit(Event::DiskBasedDiagnosticsFinished);
3542 }
3543 }
3544
3545 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3546 self.active_entry
3547 }
3548
3549 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3550 self.worktree_for_id(path.worktree_id, cx)?
3551 .read(cx)
3552 .entry_for_path(&path.path)
3553 .map(|entry| entry.id)
3554 }
3555
3556 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3557 let worktree = self.worktree_for_entry(entry_id, cx)?;
3558 let worktree = worktree.read(cx);
3559 let worktree_id = worktree.id();
3560 let path = worktree.entry_for_id(entry_id)?.path.clone();
3561 Some(ProjectPath { worktree_id, path })
3562 }
3563
3564 // RPC message handlers
3565
3566 async fn handle_unshare_project(
3567 this: ModelHandle<Self>,
3568 _: TypedEnvelope<proto::UnshareProject>,
3569 _: Arc<Client>,
3570 mut cx: AsyncAppContext,
3571 ) -> Result<()> {
3572 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3573 Ok(())
3574 }
3575
3576 async fn handle_add_collaborator(
3577 this: ModelHandle<Self>,
3578 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3579 _: Arc<Client>,
3580 mut cx: AsyncAppContext,
3581 ) -> Result<()> {
3582 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3583 let collaborator = envelope
3584 .payload
3585 .collaborator
3586 .take()
3587 .ok_or_else(|| anyhow!("empty collaborator"))?;
3588
3589 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3590 this.update(&mut cx, |this, cx| {
3591 this.collaborators
3592 .insert(collaborator.peer_id, collaborator);
3593 cx.notify();
3594 });
3595
3596 Ok(())
3597 }
3598
3599 async fn handle_remove_collaborator(
3600 this: ModelHandle<Self>,
3601 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3602 _: Arc<Client>,
3603 mut cx: AsyncAppContext,
3604 ) -> Result<()> {
3605 this.update(&mut cx, |this, cx| {
3606 let peer_id = PeerId(envelope.payload.peer_id);
3607 let replica_id = this
3608 .collaborators
3609 .remove(&peer_id)
3610 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3611 .replica_id;
3612 for (_, buffer) in &this.opened_buffers {
3613 if let Some(buffer) = buffer.upgrade(cx) {
3614 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3615 }
3616 }
3617 cx.emit(Event::CollaboratorLeft(peer_id));
3618 cx.notify();
3619 Ok(())
3620 })
3621 }
3622
3623 async fn handle_register_worktree(
3624 this: ModelHandle<Self>,
3625 envelope: TypedEnvelope<proto::RegisterWorktree>,
3626 client: Arc<Client>,
3627 mut cx: AsyncAppContext,
3628 ) -> Result<()> {
3629 this.update(&mut cx, |this, cx| {
3630 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3631 let replica_id = this.replica_id();
3632 let worktree = proto::Worktree {
3633 id: envelope.payload.worktree_id,
3634 root_name: envelope.payload.root_name,
3635 entries: Default::default(),
3636 diagnostic_summaries: Default::default(),
3637 visible: envelope.payload.visible,
3638 };
3639 let (worktree, load_task) =
3640 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3641 this.add_worktree(&worktree, cx);
3642 load_task.detach();
3643 Ok(())
3644 })
3645 }
3646
3647 async fn handle_unregister_worktree(
3648 this: ModelHandle<Self>,
3649 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3650 _: Arc<Client>,
3651 mut cx: AsyncAppContext,
3652 ) -> Result<()> {
3653 this.update(&mut cx, |this, cx| {
3654 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3655 this.remove_worktree(worktree_id, cx);
3656 Ok(())
3657 })
3658 }
3659
3660 async fn handle_update_worktree(
3661 this: ModelHandle<Self>,
3662 envelope: TypedEnvelope<proto::UpdateWorktree>,
3663 _: Arc<Client>,
3664 mut cx: AsyncAppContext,
3665 ) -> Result<()> {
3666 this.update(&mut cx, |this, cx| {
3667 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3668 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3669 worktree.update(cx, |worktree, _| {
3670 let worktree = worktree.as_remote_mut().unwrap();
3671 worktree.update_from_remote(envelope)
3672 })?;
3673 }
3674 Ok(())
3675 })
3676 }
3677
3678 async fn handle_update_diagnostic_summary(
3679 this: ModelHandle<Self>,
3680 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3681 _: Arc<Client>,
3682 mut cx: AsyncAppContext,
3683 ) -> Result<()> {
3684 this.update(&mut cx, |this, cx| {
3685 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3686 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3687 if let Some(summary) = envelope.payload.summary {
3688 let project_path = ProjectPath {
3689 worktree_id,
3690 path: Path::new(&summary.path).into(),
3691 };
3692 worktree.update(cx, |worktree, _| {
3693 worktree
3694 .as_remote_mut()
3695 .unwrap()
3696 .update_diagnostic_summary(project_path.path.clone(), &summary);
3697 });
3698 cx.emit(Event::DiagnosticsUpdated(project_path));
3699 }
3700 }
3701 Ok(())
3702 })
3703 }
3704
3705 async fn handle_start_language_server(
3706 this: ModelHandle<Self>,
3707 envelope: TypedEnvelope<proto::StartLanguageServer>,
3708 _: Arc<Client>,
3709 mut cx: AsyncAppContext,
3710 ) -> Result<()> {
3711 let server = envelope
3712 .payload
3713 .server
3714 .ok_or_else(|| anyhow!("invalid server"))?;
3715 this.update(&mut cx, |this, cx| {
3716 this.language_server_statuses.insert(
3717 server.id as usize,
3718 LanguageServerStatus {
3719 name: server.name,
3720 pending_work: Default::default(),
3721 pending_diagnostic_updates: 0,
3722 },
3723 );
3724 cx.notify();
3725 });
3726 Ok(())
3727 }
3728
3729 async fn handle_update_language_server(
3730 this: ModelHandle<Self>,
3731 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3732 _: Arc<Client>,
3733 mut cx: AsyncAppContext,
3734 ) -> Result<()> {
3735 let language_server_id = envelope.payload.language_server_id as usize;
3736 match envelope
3737 .payload
3738 .variant
3739 .ok_or_else(|| anyhow!("invalid variant"))?
3740 {
3741 proto::update_language_server::Variant::WorkStart(payload) => {
3742 this.update(&mut cx, |this, cx| {
3743 this.on_lsp_work_start(language_server_id, payload.token, cx);
3744 })
3745 }
3746 proto::update_language_server::Variant::WorkProgress(payload) => {
3747 this.update(&mut cx, |this, cx| {
3748 this.on_lsp_work_progress(
3749 language_server_id,
3750 payload.token,
3751 LanguageServerProgress {
3752 message: payload.message,
3753 percentage: payload.percentage.map(|p| p as usize),
3754 last_update_at: Instant::now(),
3755 },
3756 cx,
3757 );
3758 })
3759 }
3760 proto::update_language_server::Variant::WorkEnd(payload) => {
3761 this.update(&mut cx, |this, cx| {
3762 this.on_lsp_work_end(language_server_id, payload.token, cx);
3763 })
3764 }
3765 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3766 this.update(&mut cx, |this, cx| {
3767 this.disk_based_diagnostics_started(cx);
3768 })
3769 }
3770 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3771 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3772 }
3773 }
3774
3775 Ok(())
3776 }
3777
3778 async fn handle_update_buffer(
3779 this: ModelHandle<Self>,
3780 envelope: TypedEnvelope<proto::UpdateBuffer>,
3781 _: Arc<Client>,
3782 mut cx: AsyncAppContext,
3783 ) -> Result<()> {
3784 this.update(&mut cx, |this, cx| {
3785 let payload = envelope.payload.clone();
3786 let buffer_id = payload.buffer_id;
3787 let ops = payload
3788 .operations
3789 .into_iter()
3790                .map(language::proto::deserialize_operation)
3791 .collect::<Result<Vec<_>, _>>()?;
3792 match this.opened_buffers.entry(buffer_id) {
3793 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3794 OpenBuffer::Strong(buffer) => {
3795 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3796 }
3797 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3798 OpenBuffer::Weak(_) => {}
3799 },
3800 hash_map::Entry::Vacant(e) => {
3801 e.insert(OpenBuffer::Loading(ops));
3802 }
3803 }
3804 Ok(())
3805 })
3806 }
3807
3808 async fn handle_update_buffer_file(
3809 this: ModelHandle<Self>,
3810 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3811 _: Arc<Client>,
3812 mut cx: AsyncAppContext,
3813 ) -> Result<()> {
3814 this.update(&mut cx, |this, cx| {
3815 let payload = envelope.payload.clone();
3816 let buffer_id = payload.buffer_id;
3817 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3818 let worktree = this
3819 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3820 .ok_or_else(|| anyhow!("no such worktree"))?;
3821 let file = File::from_proto(file, worktree.clone(), cx)?;
3822 let buffer = this
3823 .opened_buffers
3824 .get_mut(&buffer_id)
3825 .and_then(|b| b.upgrade(cx))
3826 .ok_or_else(|| anyhow!("no such buffer"))?;
3827 buffer.update(cx, |buffer, cx| {
3828 buffer.file_updated(Box::new(file), cx).detach();
3829 });
3830 Ok(())
3831 })
3832 }
3833
3834 async fn handle_save_buffer(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::SaveBuffer>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::BufferSaved> {
3840 let buffer_id = envelope.payload.buffer_id;
3841 let requested_version = deserialize_version(envelope.payload.version);
3842
3843 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3844 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3845 let buffer = this
3846 .opened_buffers
3847 .get(&buffer_id)
3848 .and_then(|buffer| buffer.upgrade(cx))
3849 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3850 Ok::<_, anyhow::Error>((project_id, buffer))
3851 })?;
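        // Wait until this replica has seen the version of the buffer that the
        // requesting peer observed, so we don't save stale contents.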
3852 buffer
3853 .update(&mut cx, |buffer, _| {
3854 buffer.wait_for_version(requested_version)
3855 })
3856 .await;
3857
3858 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3859 Ok(proto::BufferSaved {
3860 project_id,
3861 buffer_id,
3862 version: serialize_version(&saved_version),
3863 mtime: Some(mtime.into()),
3864 })
3865 }
3866
3867 async fn handle_reload_buffers(
3868 this: ModelHandle<Self>,
3869 envelope: TypedEnvelope<proto::ReloadBuffers>,
3870 _: Arc<Client>,
3871 mut cx: AsyncAppContext,
3872 ) -> Result<proto::ReloadBuffersResponse> {
3873 let sender_id = envelope.original_sender_id()?;
3874 let reload = this.update(&mut cx, |this, cx| {
3875 let mut buffers = HashSet::default();
3876 for buffer_id in &envelope.payload.buffer_ids {
3877 buffers.insert(
3878 this.opened_buffers
3879 .get(buffer_id)
3880 .and_then(|buffer| buffer.upgrade(cx))
3881 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3882 );
3883 }
3884 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3885 })?;
3886
3887 let project_transaction = reload.await?;
3888 let project_transaction = this.update(&mut cx, |this, cx| {
3889 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3890 });
3891 Ok(proto::ReloadBuffersResponse {
3892 transaction: Some(project_transaction),
3893 })
3894 }
3895
3896 async fn handle_format_buffers(
3897 this: ModelHandle<Self>,
3898 envelope: TypedEnvelope<proto::FormatBuffers>,
3899 _: Arc<Client>,
3900 mut cx: AsyncAppContext,
3901 ) -> Result<proto::FormatBuffersResponse> {
3902 let sender_id = envelope.original_sender_id()?;
3903 let format = this.update(&mut cx, |this, cx| {
3904 let mut buffers = HashSet::default();
3905 for buffer_id in &envelope.payload.buffer_ids {
3906 buffers.insert(
3907 this.opened_buffers
3908 .get(buffer_id)
3909 .and_then(|buffer| buffer.upgrade(cx))
3910 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3911 );
3912 }
3913 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3914 })?;
3915
3916 let project_transaction = format.await?;
3917 let project_transaction = this.update(&mut cx, |this, cx| {
3918 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3919 });
3920 Ok(proto::FormatBuffersResponse {
3921 transaction: Some(project_transaction),
3922 })
3923 }
3924
3925 async fn handle_get_completions(
3926 this: ModelHandle<Self>,
3927 envelope: TypedEnvelope<proto::GetCompletions>,
3928 _: Arc<Client>,
3929 mut cx: AsyncAppContext,
3930 ) -> Result<proto::GetCompletionsResponse> {
3931 let position = envelope
3932 .payload
3933 .position
3934 .and_then(language::proto::deserialize_anchor)
3935 .ok_or_else(|| anyhow!("invalid position"))?;
3936 let version = deserialize_version(envelope.payload.version);
3937 let buffer = this.read_with(&cx, |this, cx| {
3938 this.opened_buffers
3939 .get(&envelope.payload.buffer_id)
3940 .and_then(|buffer| buffer.upgrade(cx))
3941 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3942 })?;
3943 buffer
3944 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3945 .await;
3946 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3947 let completions = this
3948 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3949 .await?;
3950
3951 Ok(proto::GetCompletionsResponse {
3952 completions: completions
3953 .iter()
3954 .map(language::proto::serialize_completion)
3955 .collect(),
3956 version: serialize_version(&version),
3957 })
3958 }
3959
3960 async fn handle_apply_additional_edits_for_completion(
3961 this: ModelHandle<Self>,
3962 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3963 _: Arc<Client>,
3964 mut cx: AsyncAppContext,
3965 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3966 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3967 let buffer = this
3968 .opened_buffers
3969 .get(&envelope.payload.buffer_id)
3970 .and_then(|buffer| buffer.upgrade(cx))
3971 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3972 let language = buffer.read(cx).language();
3973 let completion = language::proto::deserialize_completion(
3974 envelope
3975 .payload
3976 .completion
3977 .ok_or_else(|| anyhow!("invalid completion"))?,
3978 language,
3979 )?;
3980 Ok::<_, anyhow::Error>(
3981 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3982 )
3983 })?;
3984
3985 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3986 transaction: apply_additional_edits
3987 .await?
3988 .as_ref()
3989 .map(language::proto::serialize_transaction),
3990 })
3991 }
3992
3993 async fn handle_get_code_actions(
3994 this: ModelHandle<Self>,
3995 envelope: TypedEnvelope<proto::GetCodeActions>,
3996 _: Arc<Client>,
3997 mut cx: AsyncAppContext,
3998 ) -> Result<proto::GetCodeActionsResponse> {
3999 let start = envelope
4000 .payload
4001 .start
4002 .and_then(language::proto::deserialize_anchor)
4003 .ok_or_else(|| anyhow!("invalid start"))?;
4004 let end = envelope
4005 .payload
4006 .end
4007 .and_then(language::proto::deserialize_anchor)
4008 .ok_or_else(|| anyhow!("invalid end"))?;
4009 let buffer = this.update(&mut cx, |this, cx| {
4010 this.opened_buffers
4011 .get(&envelope.payload.buffer_id)
4012 .and_then(|buffer| buffer.upgrade(cx))
4013 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4014 })?;
4015 buffer
4016 .update(&mut cx, |buffer, _| {
4017 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4018 })
4019 .await;
4020
4021 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4022 let code_actions = this.update(&mut cx, |this, cx| {
4023 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4024 })?;
4025
4026 Ok(proto::GetCodeActionsResponse {
4027 actions: code_actions
4028 .await?
4029 .iter()
4030 .map(language::proto::serialize_code_action)
4031 .collect(),
4032 version: serialize_version(&version),
4033 })
4034 }
4035
4036 async fn handle_apply_code_action(
4037 this: ModelHandle<Self>,
4038 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4039 _: Arc<Client>,
4040 mut cx: AsyncAppContext,
4041 ) -> Result<proto::ApplyCodeActionResponse> {
4042 let sender_id = envelope.original_sender_id()?;
4043 let action = language::proto::deserialize_code_action(
4044 envelope
4045 .payload
4046 .action
4047 .ok_or_else(|| anyhow!("invalid action"))?,
4048 )?;
4049 let apply_code_action = this.update(&mut cx, |this, cx| {
4050 let buffer = this
4051 .opened_buffers
4052 .get(&envelope.payload.buffer_id)
4053 .and_then(|buffer| buffer.upgrade(cx))
4054 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4055 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4056 })?;
4057
4058 let project_transaction = apply_code_action.await?;
4059 let project_transaction = this.update(&mut cx, |this, cx| {
4060 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4061 });
4062 Ok(proto::ApplyCodeActionResponse {
4063 transaction: Some(project_transaction),
4064 })
4065 }
4066
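    // Generic handler for LSP-backed requests forwarded by remote peers: resolve the
    // target buffer, run the request against the local language server, and serialize
    // the response back for the requesting peer.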
4067 async fn handle_lsp_command<T: LspCommand>(
4068 this: ModelHandle<Self>,
4069 envelope: TypedEnvelope<T::ProtoRequest>,
4070 _: Arc<Client>,
4071 mut cx: AsyncAppContext,
4072 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4073 where
4074 <T::LspRequest as lsp::request::Request>::Result: Send,
4075 {
4076 let sender_id = envelope.original_sender_id()?;
4077 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4078 let buffer_handle = this.read_with(&cx, |this, _| {
4079 this.opened_buffers
4080 .get(&buffer_id)
4081 .and_then(|buffer| buffer.upgrade(&cx))
4082 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4083 })?;
4084 let request = T::from_proto(
4085 envelope.payload,
4086 this.clone(),
4087 buffer_handle.clone(),
4088 cx.clone(),
4089 )
4090 .await?;
4091 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4092 let response = this
4093 .update(&mut cx, |this, cx| {
4094 this.request_lsp(buffer_handle, request, cx)
4095 })
4096 .await?;
4097 this.update(&mut cx, |this, cx| {
4098 Ok(T::response_to_proto(
4099 response,
4100 this,
4101 sender_id,
4102 &buffer_version,
4103 cx,
4104 ))
4105 })
4106 }
4107
4108 async fn handle_get_project_symbols(
4109 this: ModelHandle<Self>,
4110 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4111 _: Arc<Client>,
4112 mut cx: AsyncAppContext,
4113 ) -> Result<proto::GetProjectSymbolsResponse> {
4114 let symbols = this
4115 .update(&mut cx, |this, cx| {
4116 this.symbols(&envelope.payload.query, cx)
4117 })
4118 .await?;
4119
4120 Ok(proto::GetProjectSymbolsResponse {
4121 symbols: symbols.iter().map(serialize_symbol).collect(),
4122 })
4123 }
4124
4125 async fn handle_search_project(
4126 this: ModelHandle<Self>,
4127 envelope: TypedEnvelope<proto::SearchProject>,
4128 _: Arc<Client>,
4129 mut cx: AsyncAppContext,
4130 ) -> Result<proto::SearchProjectResponse> {
4131 let peer_id = envelope.original_sender_id()?;
4132 let query = SearchQuery::from_proto(envelope.payload)?;
4133 let result = this
4134 .update(&mut cx, |this, cx| this.search(query, cx))
4135 .await?;
4136
4137 this.update(&mut cx, |this, cx| {
4138 let mut locations = Vec::new();
4139 for (buffer, ranges) in result {
4140 for range in ranges {
4141 let start = serialize_anchor(&range.start);
4142 let end = serialize_anchor(&range.end);
4143 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4144 locations.push(proto::Location {
4145 buffer: Some(buffer),
4146 start: Some(start),
4147 end: Some(end),
4148 });
4149 }
4150 }
4151 Ok(proto::SearchProjectResponse { locations })
4152 })
4153 }
4154
4155 async fn handle_open_buffer_for_symbol(
4156 this: ModelHandle<Self>,
4157 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4158 _: Arc<Client>,
4159 mut cx: AsyncAppContext,
4160 ) -> Result<proto::OpenBufferForSymbolResponse> {
4161 let peer_id = envelope.original_sender_id()?;
4162 let symbol = envelope
4163 .payload
4164 .symbol
4165 .ok_or_else(|| anyhow!("invalid symbol"))?;
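        // Only open buffers for symbols whose signature matches one this project
        // produced, so peers can't request arbitrary paths.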
4166 let symbol = this.read_with(&cx, |this, _| {
4167 let symbol = this.deserialize_symbol(symbol)?;
4168 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4169 if signature == symbol.signature {
4170 Ok(symbol)
4171 } else {
4172 Err(anyhow!("invalid symbol signature"))
4173 }
4174 })?;
4175 let buffer = this
4176 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4177 .await?;
4178
4179 Ok(proto::OpenBufferForSymbolResponse {
4180 buffer: Some(this.update(&mut cx, |this, cx| {
4181 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4182 })),
4183 })
4184 }
4185
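    // Hash the worktree id and path together with this project's nonce, producing
    // the signature that accompanies symbols sent to remote peers.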
4186 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4187 let mut hasher = Sha256::new();
4188 hasher.update(worktree_id.to_proto().to_be_bytes());
4189 hasher.update(path.to_string_lossy().as_bytes());
4190 hasher.update(self.nonce.to_be_bytes());
4191 hasher.finalize().as_slice().try_into().unwrap()
4192 }
4193
4194 async fn handle_open_buffer_by_id(
4195 this: ModelHandle<Self>,
4196 envelope: TypedEnvelope<proto::OpenBufferById>,
4197 _: Arc<Client>,
4198 mut cx: AsyncAppContext,
4199 ) -> Result<proto::OpenBufferResponse> {
4200 let peer_id = envelope.original_sender_id()?;
4201 let buffer = this
4202 .update(&mut cx, |this, cx| {
4203 this.open_buffer_by_id(envelope.payload.id, cx)
4204 })
4205 .await?;
4206 this.update(&mut cx, |this, cx| {
4207 Ok(proto::OpenBufferResponse {
4208 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4209 })
4210 })
4211 }
4212
4213 async fn handle_open_buffer_by_path(
4214 this: ModelHandle<Self>,
4215 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4216 _: Arc<Client>,
4217 mut cx: AsyncAppContext,
4218 ) -> Result<proto::OpenBufferResponse> {
4219 let peer_id = envelope.original_sender_id()?;
4220 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4221 let open_buffer = this.update(&mut cx, |this, cx| {
4222 this.open_buffer(
4223 ProjectPath {
4224 worktree_id,
4225 path: PathBuf::from(envelope.payload.path).into(),
4226 },
4227 cx,
4228 )
4229 });
4230
4231 let buffer = open_buffer.await?;
4232 this.update(&mut cx, |this, cx| {
4233 Ok(proto::OpenBufferResponse {
4234 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4235 })
4236 })
4237 }
4238
4239 fn serialize_project_transaction_for_peer(
4240 &mut self,
4241 project_transaction: ProjectTransaction,
4242 peer_id: PeerId,
4243 cx: &AppContext,
4244 ) -> proto::ProjectTransaction {
4245 let mut serialized_transaction = proto::ProjectTransaction {
4246 buffers: Default::default(),
4247 transactions: Default::default(),
4248 };
4249 for (buffer, transaction) in project_transaction.0 {
4250 serialized_transaction
4251 .buffers
4252 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4253 serialized_transaction
4254 .transactions
4255 .push(language::proto::serialize_transaction(&transaction));
4256 }
4257 serialized_transaction
4258 }
4259
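    // Rebuild a `ProjectTransaction` from its wire form, waiting for each buffer's
    // edits to arrive before optionally pushing the transaction onto that buffer's
    // undo history.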
4260 fn deserialize_project_transaction(
4261 &mut self,
4262 message: proto::ProjectTransaction,
4263 push_to_history: bool,
4264 cx: &mut ModelContext<Self>,
4265 ) -> Task<Result<ProjectTransaction>> {
4266 cx.spawn(|this, mut cx| async move {
4267 let mut project_transaction = ProjectTransaction::default();
4268 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4269 let buffer = this
4270 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4271 .await?;
4272 let transaction = language::proto::deserialize_transaction(transaction)?;
4273 project_transaction.0.insert(buffer, transaction);
4274 }
4275
4276 for (buffer, transaction) in &project_transaction.0 {
4277 buffer
4278 .update(&mut cx, |buffer, _| {
4279 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4280 })
4281 .await;
4282
4283 if push_to_history {
4284 buffer.update(&mut cx, |buffer, _| {
4285 buffer.push_transaction(transaction.clone(), Instant::now());
4286 });
4287 }
4288 }
4289
4290 Ok(project_transaction)
4291 })
4292 }
4293
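    // The first time a buffer is sent to a given peer, serialize its full state.
    // Afterwards, only its id is sent, since the peer already has a replica.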
4294 fn serialize_buffer_for_peer(
4295 &mut self,
4296 buffer: &ModelHandle<Buffer>,
4297 peer_id: PeerId,
4298 cx: &AppContext,
4299 ) -> proto::Buffer {
4300 let buffer_id = buffer.read(cx).remote_id();
4301 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4302 if shared_buffers.insert(buffer_id) {
4303 proto::Buffer {
4304 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4305 }
4306 } else {
4307 proto::Buffer {
4308 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4309 }
4310 }
4311 }
4312
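    // The inverse of `serialize_buffer_for_peer`: an `Id` variant refers to a buffer
    // that should arrive (or has arrived) separately, so wait for it to appear in
    // `opened_buffers`; a `State` variant carries the full buffer and is registered
    // locally.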
4313 fn deserialize_buffer(
4314 &mut self,
4315 buffer: proto::Buffer,
4316 cx: &mut ModelContext<Self>,
4317 ) -> Task<Result<ModelHandle<Buffer>>> {
4318 let replica_id = self.replica_id();
4319
4320 let opened_buffer_tx = self.opened_buffer.0.clone();
4321 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4322 cx.spawn(|this, mut cx| async move {
4323 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4324 proto::buffer::Variant::Id(id) => {
4325 let buffer = loop {
4326 let buffer = this.read_with(&cx, |this, cx| {
4327 this.opened_buffers
4328 .get(&id)
4329 .and_then(|buffer| buffer.upgrade(cx))
4330 });
4331 if let Some(buffer) = buffer {
4332 break buffer;
4333 }
4334 opened_buffer_rx
4335 .next()
4336 .await
4337 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4338 };
4339 Ok(buffer)
4340 }
4341 proto::buffer::Variant::State(mut buffer) => {
4342 let mut buffer_worktree = None;
4343 let mut buffer_file = None;
4344 if let Some(file) = buffer.file.take() {
4345 this.read_with(&cx, |this, cx| {
4346 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4347 let worktree =
4348 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4349 anyhow!("no worktree found for id {}", file.worktree_id)
4350 })?;
4351 buffer_file =
4352 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4353 as Box<dyn language::File>);
4354 buffer_worktree = Some(worktree);
4355 Ok::<_, anyhow::Error>(())
4356 })?;
4357 }
4358
4359 let buffer = cx.add_model(|cx| {
4360 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4361 });
4362
4363 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4364
4365 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4366 Ok(buffer)
4367 }
4368 }
4369 })
4370 }
4371
4372 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4373 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4374 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4375 let start = serialized_symbol
4376 .start
4377 .ok_or_else(|| anyhow!("invalid start"))?;
4378 let end = serialized_symbol
4379 .end
4380 .ok_or_else(|| anyhow!("invalid end"))?;
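        // Note: the kind is reinterpreted directly from its wire representation and
        // is not validated here.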
4381 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4382 let path = PathBuf::from(serialized_symbol.path);
4383 let language = self.languages.select_language(&path);
4384 Ok(Symbol {
4385 source_worktree_id,
4386 worktree_id,
4387 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4388 label: language
4389 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4390 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4391 name: serialized_symbol.name,
4392 path,
4393 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4394 kind,
4395 signature: serialized_symbol
4396 .signature
4397 .try_into()
4398 .map_err(|_| anyhow!("invalid signature"))?,
4399 })
4400 }
4401
4402 async fn handle_buffer_saved(
4403 this: ModelHandle<Self>,
4404 envelope: TypedEnvelope<proto::BufferSaved>,
4405 _: Arc<Client>,
4406 mut cx: AsyncAppContext,
4407 ) -> Result<()> {
4408 let version = deserialize_version(envelope.payload.version);
4409 let mtime = envelope
4410 .payload
4411 .mtime
4412 .ok_or_else(|| anyhow!("missing mtime"))?
4413 .into();
4414
4415 this.update(&mut cx, |this, cx| {
4416 let buffer = this
4417 .opened_buffers
4418 .get(&envelope.payload.buffer_id)
4419 .and_then(|buffer| buffer.upgrade(cx));
4420 if let Some(buffer) = buffer {
4421 buffer.update(cx, |buffer, cx| {
4422 buffer.did_save(version, mtime, None, cx);
4423 });
4424 }
4425 Ok(())
4426 })
4427 }
4428
4429 async fn handle_buffer_reloaded(
4430 this: ModelHandle<Self>,
4431 envelope: TypedEnvelope<proto::BufferReloaded>,
4432 _: Arc<Client>,
4433 mut cx: AsyncAppContext,
4434 ) -> Result<()> {
4435 let payload = envelope.payload.clone();
4436 let version = deserialize_version(payload.version);
4437 let mtime = payload
4438 .mtime
4439 .ok_or_else(|| anyhow!("missing mtime"))?
4440 .into();
4441 this.update(&mut cx, |this, cx| {
4442 let buffer = this
4443 .opened_buffers
4444 .get(&payload.buffer_id)
4445 .and_then(|buffer| buffer.upgrade(cx));
4446 if let Some(buffer) = buffer {
4447 buffer.update(cx, |buffer, cx| {
4448 buffer.did_reload(version, mtime, cx);
4449 });
4450 }
4451 Ok(())
4452 })
4453 }
4454
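    // Fuzzy-match `query` against the paths of all visible worktrees, running the
    // search on the background executor.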
4455 pub fn match_paths<'a>(
4456 &self,
4457 query: &'a str,
4458 include_ignored: bool,
4459 smart_case: bool,
4460 max_results: usize,
4461 cancel_flag: &'a AtomicBool,
4462 cx: &AppContext,
4463 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4464 let worktrees = self
4465 .worktrees(cx)
4466 .filter(|worktree| worktree.read(cx).is_visible())
4467 .collect::<Vec<_>>();
4468 let include_root_name = worktrees.len() > 1;
4469 let candidate_sets = worktrees
4470 .into_iter()
4471 .map(|worktree| CandidateSet {
4472 snapshot: worktree.read(cx).snapshot(),
4473 include_ignored,
4474 include_root_name,
4475 })
4476 .collect::<Vec<_>>();
4477
4478 let background = cx.background().clone();
4479 async move {
4480 fuzzy::match_paths(
4481 candidate_sets.as_slice(),
4482 query,
4483 smart_case,
4484 max_results,
4485 cancel_flag,
4486 background,
4487 )
4488 .await
4489 }
4490 }
4491
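    // Convert a batch of LSP text edits into anchor ranges against the snapshot that
    // corresponds to the language server's view of the buffer, diffing multi-line
    // edits so that anchors in unchanged regions keep their positions.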
4492 fn edits_from_lsp(
4493 &mut self,
4494 buffer: &ModelHandle<Buffer>,
4495 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4496 version: Option<i32>,
4497 cx: &mut ModelContext<Self>,
4498 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4499 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4500 cx.background().spawn(async move {
4501 let snapshot = snapshot?;
4502 let mut lsp_edits = lsp_edits
4503 .into_iter()
4504 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4505 .peekable();
4506
4507 let mut edits = Vec::new();
4508 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4509 // Combine any LSP edits that are adjacent.
4510 //
4511 // Also, combine LSP edits that are separated from each other by only
4512 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
4514 // are separated by unchanged newline characters.
4515 //
4516 // In order for the diffing logic below to work properly, any edits that
4517 // cancel each other out must be combined into one.
4518 while let Some((next_range, next_text)) = lsp_edits.peek() {
4519 if next_range.start > range.end {
4520 if next_range.start.row > range.end.row + 1
4521 || next_range.start.column > 0
4522 || snapshot.clip_point_utf16(
4523 PointUtf16::new(range.end.row, u32::MAX),
4524 Bias::Left,
4525 ) > range.end
4526 {
4527 break;
4528 }
4529 new_text.push('\n');
4530 }
4531 range.end = next_range.end;
4532 new_text.push_str(&next_text);
4533 lsp_edits.next();
4534 }
4535
4536 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4537 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4538 {
4539 return Err(anyhow!("invalid edits received from language server"));
4540 }
4541
4542 // For multiline edits, perform a diff of the old and new text so that
4543 // we can identify the changes more precisely, preserving the locations
4544 // of any anchors positioned in the unchanged regions.
4545 if range.end.row > range.start.row {
4546 let mut offset = range.start.to_offset(&snapshot);
4547 let old_text = snapshot.text_for_range(range).collect::<String>();
4548
4549 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4550 let mut moved_since_edit = true;
4551 for change in diff.iter_all_changes() {
4552 let tag = change.tag();
4553 let value = change.value();
4554 match tag {
4555 ChangeTag::Equal => {
4556 offset += value.len();
4557 moved_since_edit = true;
4558 }
4559 ChangeTag::Delete => {
4560 let start = snapshot.anchor_after(offset);
4561 let end = snapshot.anchor_before(offset + value.len());
4562 if moved_since_edit {
4563 edits.push((start..end, String::new()));
4564 } else {
4565 edits.last_mut().unwrap().0.end = end;
4566 }
4567 offset += value.len();
4568 moved_since_edit = false;
4569 }
4570 ChangeTag::Insert => {
4571 if moved_since_edit {
4572 let anchor = snapshot.anchor_after(offset);
4573 edits.push((anchor.clone()..anchor, value.to_string()));
4574 } else {
4575 edits.last_mut().unwrap().1.push_str(value);
4576 }
4577 moved_since_edit = false;
4578 }
4579 }
4580 }
4581 } else if range.end == range.start {
4582 let anchor = snapshot.anchor_after(range.start);
4583 edits.push((anchor.clone()..anchor, new_text));
4584 } else {
4585 let edit_start = snapshot.anchor_after(range.start);
4586 let edit_end = snapshot.anchor_before(range.end);
4587 edits.push((edit_start..edit_end, new_text));
4588 }
4589 }
4590
4591 Ok(edits)
4592 })
4593 }
4594
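    // Find the snapshot that was current at the given LSP document version, pruning
    // snapshots that are more than `OLD_VERSIONS_TO_RETAIN` versions older than it.
    // With no version, fall back to the buffer's current text.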
4595 fn buffer_snapshot_for_lsp_version(
4596 &mut self,
4597 buffer: &ModelHandle<Buffer>,
4598 version: Option<i32>,
4599 cx: &AppContext,
4600 ) -> Result<TextBufferSnapshot> {
4601 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4602
4603 if let Some(version) = version {
4604 let buffer_id = buffer.read(cx).remote_id();
4605 let snapshots = self
4606 .buffer_snapshots
4607 .get_mut(&buffer_id)
4608 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4609 let mut found_snapshot = None;
4610 snapshots.retain(|(snapshot_version, snapshot)| {
4611 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4612 false
4613 } else {
4614 if *snapshot_version == version {
4615 found_snapshot = Some(snapshot.clone());
4616 }
4617 true
4618 }
4619 });
4620
4621 found_snapshot.ok_or_else(|| {
4622 anyhow!(
4623 "snapshot not found for buffer {} at version {}",
4624 buffer_id,
4625 version
4626 )
4627 })
4628 } else {
            Ok(buffer.read(cx).text_snapshot())
4630 }
4631 }
4632
4633 fn language_server_for_buffer(
4634 &self,
4635 buffer: &Buffer,
4636 cx: &AppContext,
4637 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4638 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4639 let worktree_id = file.worktree_id(cx);
4640 self.language_servers
4641 .get(&(worktree_id, language.lsp_adapter()?.name()))
4642 } else {
4643 None
4644 }
4645 }
4646}
4647
4648impl WorktreeHandle {
4649 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4650 match self {
4651 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4652 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4653 }
4654 }
4655}
4656
4657impl OpenBuffer {
4658 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4659 match self {
4660 OpenBuffer::Strong(handle) => Some(handle.clone()),
4661 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4662 OpenBuffer::Loading(_) => None,
4663 }
4664 }
4665}
4666
4667struct CandidateSet {
4668 snapshot: Snapshot,
4669 include_ignored: bool,
4670 include_root_name: bool,
4671}
4672
4673impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4674 type Candidates = CandidateSetIter<'a>;
4675
4676 fn id(&self) -> usize {
4677 self.snapshot.id().to_usize()
4678 }
4679
4680 fn len(&self) -> usize {
4681 if self.include_ignored {
4682 self.snapshot.file_count()
4683 } else {
4684 self.snapshot.visible_file_count()
4685 }
4686 }
4687
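    // When the worktree's root is a single file, its name is the entire candidate
    // path; otherwise the root name is used as a prefix only when more than one
    // worktree is being searched.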
4688 fn prefix(&self) -> Arc<str> {
4689 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4690 self.snapshot.root_name().into()
4691 } else if self.include_root_name {
4692 format!("{}/", self.snapshot.root_name()).into()
4693 } else {
4694 "".into()
4695 }
4696 }
4697
4698 fn candidates(&'a self, start: usize) -> Self::Candidates {
4699 CandidateSetIter {
4700 traversal: self.snapshot.files(self.include_ignored, start),
4701 }
4702 }
4703}
4704
4705struct CandidateSetIter<'a> {
4706 traversal: Traversal<'a>,
4707}
4708
4709impl<'a> Iterator for CandidateSetIter<'a> {
4710 type Item = PathMatchCandidate<'a>;
4711
4712 fn next(&mut self) -> Option<Self::Item> {
4713 self.traversal.next().map(|entry| {
4714 if let EntryKind::File(char_bag) = entry.kind {
4715 PathMatchCandidate {
4716 path: &entry.path,
4717 char_bag,
4718 }
4719 } else {
4720 unreachable!()
4721 }
4722 })
4723 }
4724}
4725
4726impl Entity for Project {
4727 type Event = Event;
4728
4729 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4730 match &self.client_state {
4731 ProjectClientState::Local { remote_id_rx, .. } => {
4732 if let Some(project_id) = *remote_id_rx.borrow() {
4733 self.client
4734 .send(proto::UnregisterProject { project_id })
4735 .log_err();
4736 }
4737 }
4738 ProjectClientState::Remote { remote_id, .. } => {
4739 self.client
4740 .send(proto::LeaveProject {
4741 project_id: *remote_id,
4742 })
4743 .log_err();
4744 }
4745 }
4746 }
4747
4748 fn app_will_quit(
4749 &mut self,
4750 _: &mut MutableAppContext,
4751 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4752 let shutdown_futures = self
4753 .language_servers
4754 .drain()
4755 .filter_map(|(_, (_, server))| server.shutdown())
4756 .collect::<Vec<_>>();
4757 Some(
4758 async move {
4759 futures::future::join_all(shutdown_futures).await;
4760 }
4761 .boxed(),
4762 )
4763 }
4764}
4765
4766impl Collaborator {
4767 fn from_proto(
4768 message: proto::Collaborator,
4769 user_store: &ModelHandle<UserStore>,
4770 cx: &mut AsyncAppContext,
4771 ) -> impl Future<Output = Result<Self>> {
4772 let user = user_store.update(cx, |user_store, cx| {
4773 user_store.fetch_user(message.user_id, cx)
4774 });
4775
4776 async move {
4777 Ok(Self {
4778 peer_id: PeerId(message.peer_id),
4779 user: user.await?,
4780 replica_id: message.replica_id as ReplicaId,
4781 })
4782 }
4783 }
4784}
4785
4786impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4787 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4788 Self {
4789 worktree_id,
4790 path: path.as_ref().into(),
4791 }
4792 }
4793}
4794
4795impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4796 fn from(options: lsp::CreateFileOptions) -> Self {
4797 Self {
4798 overwrite: options.overwrite.unwrap_or(false),
4799 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4800 }
4801 }
4802}
4803
4804impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4805 fn from(options: lsp::RenameFileOptions) -> Self {
4806 Self {
4807 overwrite: options.overwrite.unwrap_or(false),
4808 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4809 }
4810 }
4811}
4812
4813impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4814 fn from(options: lsp::DeleteFileOptions) -> Self {
4815 Self {
4816 recursive: options.recursive.unwrap_or(false),
4817 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4818 }
4819 }
4820}
4821
4822fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4823 proto::Symbol {
4824 source_worktree_id: symbol.source_worktree_id.to_proto(),
4825 worktree_id: symbol.worktree_id.to_proto(),
4826 language_server_name: symbol.language_server_name.0.to_string(),
4827 name: symbol.name.clone(),
4828 kind: unsafe { mem::transmute(symbol.kind) },
4829 path: symbol.path.to_string_lossy().to_string(),
4830 start: Some(proto::Point {
4831 row: symbol.range.start.row,
4832 column: symbol.range.start.column,
4833 }),
4834 end: Some(proto::Point {
4835 row: symbol.range.end.row,
4836 column: symbol.range.end.column,
4837 }),
4838 signature: symbol.signature.to_vec(),
4839 }
4840}
4841
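// Compute `path` relative to `base`, inserting `..` components where necessary.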
4842fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4843 let mut path_components = path.components();
4844 let mut base_components = base.components();
4845 let mut components: Vec<Component> = Vec::new();
4846 loop {
4847 match (path_components.next(), base_components.next()) {
4848 (None, None) => break,
4849 (Some(a), None) => {
4850 components.push(a);
4851 components.extend(path_components.by_ref());
4852 break;
4853 }
4854 (None, _) => components.push(Component::ParentDir),
4855 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4856 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4857 (Some(a), Some(_)) => {
4858 components.push(Component::ParentDir);
4859 for _ in base_components {
4860 components.push(Component::ParentDir);
4861 }
4862 components.push(a);
4863 components.extend(path_components.by_ref());
4864 break;
4865 }
4866 }
4867 }
4868 components.iter().map(|c| c.as_os_str()).collect()
4869}
4870
4871impl Item for Buffer {
4872 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4873 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4874 }
4875}
4876
4877#[cfg(test)]
4878mod tests {
4879 use super::{Event, *};
4880 use fs::RealFs;
4881 use futures::{future, StreamExt};
4882 use gpui::test::subscribe;
4883 use language::{
4884 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4885 ToPoint,
4886 };
4887 use lsp::Url;
4888 use serde_json::json;
4889 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4890 use unindent::Unindent as _;
4891 use util::{assert_set_eq, test::temp_tree};
4892 use worktree::WorktreeHandle as _;
4893
4894 #[gpui::test]
4895 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4896 let dir = temp_tree(json!({
4897 "root": {
4898 "apple": "",
4899 "banana": {
4900 "carrot": {
4901 "date": "",
4902 "endive": "",
4903 }
4904 },
4905 "fennel": {
4906 "grape": "",
4907 }
4908 }
4909 }));
4910
4911 let root_link_path = dir.path().join("root_link");
4912 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4913 unix::fs::symlink(
4914 &dir.path().join("root/fennel"),
4915 &dir.path().join("root/finnochio"),
4916 )
4917 .unwrap();
4918
4919 let project = Project::test(Arc::new(RealFs), cx);
4920
4921 let (tree, _) = project
4922 .update(cx, |project, cx| {
4923 project.find_or_create_local_worktree(&root_link_path, true, cx)
4924 })
4925 .await
4926 .unwrap();
4927
4928 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4929 .await;
4930 cx.read(|cx| {
4931 let tree = tree.read(cx);
4932 assert_eq!(tree.file_count(), 5);
4933 assert_eq!(
4934 tree.inode_for_path("fennel/grape"),
4935 tree.inode_for_path("finnochio/grape")
4936 );
4937 });
4938
4939 let cancel_flag = Default::default();
4940 let results = project
4941 .read_with(cx, |project, cx| {
4942 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4943 })
4944 .await;
4945 assert_eq!(
4946 results
4947 .into_iter()
4948 .map(|result| result.path)
4949 .collect::<Vec<Arc<Path>>>(),
4950 vec![
4951 PathBuf::from("banana/carrot/date").into(),
4952 PathBuf::from("banana/carrot/endive").into(),
4953 ]
4954 );
4955 }
4956
4957 #[gpui::test]
4958 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4959 cx.foreground().forbid_parking();
4960
4961 let mut rust_language = Language::new(
4962 LanguageConfig {
4963 name: "Rust".into(),
4964 path_suffixes: vec!["rs".to_string()],
4965 ..Default::default()
4966 },
4967 Some(tree_sitter_rust::language()),
4968 );
4969 let mut json_language = Language::new(
4970 LanguageConfig {
4971 name: "JSON".into(),
4972 path_suffixes: vec!["json".to_string()],
4973 ..Default::default()
4974 },
4975 None,
4976 );
4977 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4978 name: "the-rust-language-server",
4979 capabilities: lsp::ServerCapabilities {
4980 completion_provider: Some(lsp::CompletionOptions {
4981 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4982 ..Default::default()
4983 }),
4984 ..Default::default()
4985 },
4986 ..Default::default()
4987 });
4988 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4989 name: "the-json-language-server",
4990 capabilities: lsp::ServerCapabilities {
4991 completion_provider: Some(lsp::CompletionOptions {
4992 trigger_characters: Some(vec![":".to_string()]),
4993 ..Default::default()
4994 }),
4995 ..Default::default()
4996 },
4997 ..Default::default()
4998 });
4999
5000 let fs = FakeFs::new(cx.background());
5001 fs.insert_tree(
5002 "/the-root",
5003 json!({
5004 "test.rs": "const A: i32 = 1;",
5005 "test2.rs": "",
5006 "Cargo.toml": "a = 1",
5007 "package.json": "{\"a\": 1}",
5008 }),
5009 )
5010 .await;
5011
5012 let project = Project::test(fs.clone(), cx);
5013 project.update(cx, |project, _| {
5014 project.languages.add(Arc::new(rust_language));
5015 project.languages.add(Arc::new(json_language));
5016 });
5017
5018 let worktree_id = project
5019 .update(cx, |project, cx| {
5020 project.find_or_create_local_worktree("/the-root", true, cx)
5021 })
5022 .await
5023 .unwrap()
5024 .0
5025 .read_with(cx, |tree, _| tree.id());
5026
5027 // Open a buffer without an associated language server.
5028 let toml_buffer = project
5029 .update(cx, |project, cx| {
5030 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5031 })
5032 .await
5033 .unwrap();
5034
5035 // Open a buffer with an associated language server.
5036 let rust_buffer = project
5037 .update(cx, |project, cx| {
5038 project.open_buffer((worktree_id, "test.rs"), cx)
5039 })
5040 .await
5041 .unwrap();
5042
5043 // A server is started up, and it is notified about Rust files.
5044 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5045 assert_eq!(
5046 fake_rust_server
5047 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5048 .await
5049 .text_document,
5050 lsp::TextDocumentItem {
5051 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5052 version: 0,
5053 text: "const A: i32 = 1;".to_string(),
5054 language_id: Default::default()
5055 }
5056 );
5057
5058 // The buffer is configured based on the language server's capabilities.
5059 rust_buffer.read_with(cx, |buffer, _| {
5060 assert_eq!(
5061 buffer.completion_triggers(),
5062 &[".".to_string(), "::".to_string()]
5063 );
5064 });
5065 toml_buffer.read_with(cx, |buffer, _| {
5066 assert!(buffer.completion_triggers().is_empty());
5067 });
5068
5069 // Edit a buffer. The changes are reported to the language server.
5070 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5071 assert_eq!(
5072 fake_rust_server
5073 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5074 .await
5075 .text_document,
5076 lsp::VersionedTextDocumentIdentifier::new(
5077 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5078 1
5079 )
5080 );
5081
5082 // Open a third buffer with a different associated language server.
5083 let json_buffer = project
5084 .update(cx, |project, cx| {
5085 project.open_buffer((worktree_id, "package.json"), cx)
5086 })
5087 .await
5088 .unwrap();
5089
5090 // A json language server is started up and is only notified about the json buffer.
5091 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5092 assert_eq!(
5093 fake_json_server
5094 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5095 .await
5096 .text_document,
5097 lsp::TextDocumentItem {
5098 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5099 version: 0,
5100 text: "{\"a\": 1}".to_string(),
5101 language_id: Default::default()
5102 }
5103 );
5104
5105 // This buffer is configured based on the second language server's
5106 // capabilities.
5107 json_buffer.read_with(cx, |buffer, _| {
5108 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5109 });
5110
5111 // When opening another buffer whose language server is already running,
5112 // it is also configured based on the existing language server's capabilities.
5113 let rust_buffer2 = project
5114 .update(cx, |project, cx| {
5115 project.open_buffer((worktree_id, "test2.rs"), cx)
5116 })
5117 .await
5118 .unwrap();
5119 rust_buffer2.read_with(cx, |buffer, _| {
5120 assert_eq!(
5121 buffer.completion_triggers(),
5122 &[".".to_string(), "::".to_string()]
5123 );
5124 });
5125
5126 // Changes are reported only to servers matching the buffer's language.
5127 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5128 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5129 assert_eq!(
5130 fake_rust_server
5131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5132 .await
5133 .text_document,
5134 lsp::VersionedTextDocumentIdentifier::new(
5135 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5136 1
5137 )
5138 );
5139
5140 // Save notifications are reported to all servers.
5141 toml_buffer
5142 .update(cx, |buffer, cx| buffer.save(cx))
5143 .await
5144 .unwrap();
5145 assert_eq!(
5146 fake_rust_server
5147 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5148 .await
5149 .text_document,
5150 lsp::TextDocumentIdentifier::new(
5151 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5152 )
5153 );
5154 assert_eq!(
5155 fake_json_server
5156 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5157 .await
5158 .text_document,
5159 lsp::TextDocumentIdentifier::new(
5160 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5161 )
5162 );
5163
5164 // Renames are reported only to servers matching the buffer's language.
5165 fs.rename(
5166 Path::new("/the-root/test2.rs"),
5167 Path::new("/the-root/test3.rs"),
5168 Default::default(),
5169 )
5170 .await
5171 .unwrap();
5172 assert_eq!(
5173 fake_rust_server
5174 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5175 .await
5176 .text_document,
5177 lsp::TextDocumentIdentifier::new(
5178 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5179 ),
5180 );
5181 assert_eq!(
5182 fake_rust_server
5183 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5184 .await
5185 .text_document,
5186 lsp::TextDocumentItem {
5187 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5188 version: 0,
5189 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5190 language_id: Default::default()
5191 },
5192 );
5193
5194 rust_buffer2.update(cx, |buffer, cx| {
5195 buffer.update_diagnostics(
5196 DiagnosticSet::from_sorted_entries(
5197 vec![DiagnosticEntry {
5198 diagnostic: Default::default(),
5199 range: Anchor::MIN..Anchor::MAX,
5200 }],
5201 &buffer.snapshot(),
5202 ),
5203 cx,
5204 );
5205 assert_eq!(
5206 buffer
5207 .snapshot()
5208 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5209 .count(),
5210 1
5211 );
5212 });
5213
        // When the rename changes the file's extension, the buffer is closed in the old
        // language server and reopened in the new one.
5216 fs.rename(
5217 Path::new("/the-root/test3.rs"),
5218 Path::new("/the-root/test3.json"),
5219 Default::default(),
5220 )
5221 .await
5222 .unwrap();
5223 assert_eq!(
5224 fake_rust_server
5225 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5226 .await
5227 .text_document,
5228 lsp::TextDocumentIdentifier::new(
5229 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5230 ),
5231 );
5232 assert_eq!(
5233 fake_json_server
5234 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5235 .await
5236 .text_document,
5237 lsp::TextDocumentItem {
5238 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5239 version: 0,
5240 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5241 language_id: Default::default()
5242 },
5243 );
5244 // We clear the diagnostics, since the language has changed.
5245 rust_buffer2.read_with(cx, |buffer, _| {
5246 assert_eq!(
5247 buffer
5248 .snapshot()
5249 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5250 .count(),
5251 0
5252 );
5253 });
5254
5255 // The renamed file's version resets after changing language server.
5256 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5257 assert_eq!(
5258 fake_json_server
5259 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5260 .await
5261 .text_document,
5262 lsp::VersionedTextDocumentIdentifier::new(
5263 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5264 1
5265 )
5266 );
5267
5268 // Restart language servers
5269 project.update(cx, |project, cx| {
5270 project.restart_language_servers_for_buffers(
5271 vec![rust_buffer.clone(), json_buffer.clone()],
5272 cx,
5273 );
5274 });
5275
5276 let mut rust_shutdown_requests = fake_rust_server
5277 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5278 let mut json_shutdown_requests = fake_json_server
5279 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5280 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5281
5282 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5283 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5284
5285 // Ensure rust document is reopened in new rust language server
5286 assert_eq!(
5287 fake_rust_server
5288 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5289 .await
5290 .text_document,
5291 lsp::TextDocumentItem {
5292 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5293 version: 1,
5294 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5295 language_id: Default::default()
5296 }
5297 );
5298
5299 // Ensure json documents are reopened in new json language server
5300 assert_set_eq!(
5301 [
5302 fake_json_server
5303 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5304 .await
5305 .text_document,
5306 fake_json_server
5307 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5308 .await
5309 .text_document,
5310 ],
5311 [
5312 lsp::TextDocumentItem {
5313 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5314 version: 0,
5315 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5316 language_id: Default::default()
5317 },
5318 lsp::TextDocumentItem {
5319 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5320 version: 1,
5321 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5322 language_id: Default::default()
5323 }
5324 ]
5325 );
5326
5327 // Close notifications are reported only to servers matching the buffer's language.
5328 cx.update(|_| drop(json_buffer));
5329 let close_message = lsp::DidCloseTextDocumentParams {
5330 text_document: lsp::TextDocumentIdentifier::new(
5331 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5332 ),
5333 };
5334 assert_eq!(
5335 fake_json_server
5336 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5337 .await,
5338 close_message,
5339 );
5340 }
5341
5342 #[gpui::test]
5343 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5344 cx.foreground().forbid_parking();
5345
5346 let progress_token = "the-progress-token";
5347 let mut language = Language::new(
5348 LanguageConfig {
5349 name: "Rust".into(),
5350 path_suffixes: vec!["rs".to_string()],
5351 ..Default::default()
5352 },
5353 Some(tree_sitter_rust::language()),
5354 );
5355 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5356 disk_based_diagnostics_progress_token: Some(progress_token),
5357 disk_based_diagnostics_sources: &["disk"],
5358 ..Default::default()
5359 });
5360
5361 let fs = FakeFs::new(cx.background());
5362 fs.insert_tree(
5363 "/dir",
5364 json!({
5365 "a.rs": "fn a() { A }",
5366 "b.rs": "const y: i32 = 1",
5367 }),
5368 )
5369 .await;
5370
5371 let project = Project::test(fs, cx);
5372 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5373
5374 let (tree, _) = project
5375 .update(cx, |project, cx| {
5376 project.find_or_create_local_worktree("/dir", true, cx)
5377 })
5378 .await
5379 .unwrap();
5380 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5381
5382 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5383 .await;
5384
        // Cause the worktree to start the fake language server.
5386 let _buffer = project
5387 .update(cx, |project, cx| {
5388 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5389 })
5390 .await
5391 .unwrap();
5392
5393 let mut events = subscribe(&project, cx);
5394
5395 let mut fake_server = fake_servers.next().await.unwrap();
5396 fake_server.start_progress(progress_token).await;
5397 assert_eq!(
5398 events.next().await.unwrap(),
5399 Event::DiskBasedDiagnosticsStarted
5400 );
5401
5402 fake_server.start_progress(progress_token).await;
5403 fake_server.end_progress(progress_token).await;
5404 fake_server.start_progress(progress_token).await;
5405
5406 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5407 lsp::PublishDiagnosticsParams {
5408 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5409 version: None,
5410 diagnostics: vec![lsp::Diagnostic {
5411 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5412 severity: Some(lsp::DiagnosticSeverity::ERROR),
5413 message: "undefined variable 'A'".to_string(),
5414 ..Default::default()
5415 }],
5416 },
5417 );
5418 assert_eq!(
5419 events.next().await.unwrap(),
5420 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5421 );
5422
5423 fake_server.end_progress(progress_token).await;
5424 fake_server.end_progress(progress_token).await;
5425 assert_eq!(
5426 events.next().await.unwrap(),
5427 Event::DiskBasedDiagnosticsUpdated
5428 );
5429 assert_eq!(
5430 events.next().await.unwrap(),
5431 Event::DiskBasedDiagnosticsFinished
5432 );
5433
5434 let buffer = project
5435 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5436 .await
5437 .unwrap();
5438
5439 buffer.read_with(cx, |buffer, _| {
5440 let snapshot = buffer.snapshot();
5441 let diagnostics = snapshot
5442 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5443 .collect::<Vec<_>>();
5444 assert_eq!(
5445 diagnostics,
5446 &[DiagnosticEntry {
5447 range: Point::new(0, 9)..Point::new(0, 10),
5448 diagnostic: Diagnostic {
5449 severity: lsp::DiagnosticSeverity::ERROR,
5450 message: "undefined variable 'A'".to_string(),
5451 group_id: 0,
5452 is_primary: true,
5453 ..Default::default()
5454 }
5455 }]
5456 )
5457 });
5458 }
5459
5460 #[gpui::test]
5461 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5462 cx.foreground().forbid_parking();
5463
5464 let progress_token = "the-progress-token";
5465 let mut language = Language::new(
5466 LanguageConfig {
5467 path_suffixes: vec!["rs".to_string()],
5468 ..Default::default()
5469 },
5470 None,
5471 );
5472 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5473 disk_based_diagnostics_sources: &["disk"],
5474 disk_based_diagnostics_progress_token: Some(progress_token),
5475 ..Default::default()
5476 });
5477
5478 let fs = FakeFs::new(cx.background());
5479 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5480
5481 let project = Project::test(fs, cx);
5482 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5483
5484 let worktree_id = project
5485 .update(cx, |project, cx| {
5486 project.find_or_create_local_worktree("/dir", true, cx)
5487 })
5488 .await
5489 .unwrap()
5490 .0
5491 .read_with(cx, |tree, _| tree.id());
5492
5493 let buffer = project
5494 .update(cx, |project, cx| {
5495 project.open_buffer((worktree_id, "a.rs"), cx)
5496 })
5497 .await
5498 .unwrap();
5499
5500 // Simulate diagnostics starting to update.
5501 let mut fake_server = fake_servers.next().await.unwrap();
5502 fake_server.start_progress(progress_token).await;
5503
5504 // Restart the server before the diagnostics finish updating.
5505 project.update(cx, |project, cx| {
5506 project.restart_language_servers_for_buffers([buffer], cx);
5507 });
5508 let mut events = subscribe(&project, cx);
5509
5510 // Simulate the newly started server sending more diagnostics.
5511 let mut fake_server = fake_servers.next().await.unwrap();
5512 fake_server.start_progress(progress_token).await;
5513 assert_eq!(
5514 events.next().await.unwrap(),
5515 Event::DiskBasedDiagnosticsStarted
5516 );
5517
5518 // All diagnostics are considered done, despite the old server's diagnostic
5519 // task never completing.
5520 fake_server.end_progress(progress_token).await;
5521 assert_eq!(
5522 events.next().await.unwrap(),
5523 Event::DiskBasedDiagnosticsUpdated
5524 );
5525 assert_eq!(
5526 events.next().await.unwrap(),
5527 Event::DiskBasedDiagnosticsFinished
5528 );
5529 project.read_with(cx, |project, _| {
5530 assert!(!project.is_running_disk_based_diagnostics());
5531 });
5532 }
5533
5534 #[gpui::test]
5535 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5536 cx.foreground().forbid_parking();
5537
5538 let mut language = Language::new(
5539 LanguageConfig {
5540 name: "Rust".into(),
5541 path_suffixes: vec!["rs".to_string()],
5542 ..Default::default()
5543 },
5544 Some(tree_sitter_rust::language()),
5545 );
5546 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5547 disk_based_diagnostics_sources: &["disk"],
5548 ..Default::default()
5549 });
5550
5551 let text = "
5552 fn a() { A }
5553 fn b() { BB }
5554 fn c() { CCC }
5555 "
5556 .unindent();
5557
5558 let fs = FakeFs::new(cx.background());
5559 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5560
5561 let project = Project::test(fs, cx);
5562 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5563
5564 let worktree_id = project
5565 .update(cx, |project, cx| {
5566 project.find_or_create_local_worktree("/dir", true, cx)
5567 })
5568 .await
5569 .unwrap()
5570 .0
5571 .read_with(cx, |tree, _| tree.id());
5572
5573 let buffer = project
5574 .update(cx, |project, cx| {
5575 project.open_buffer((worktree_id, "a.rs"), cx)
5576 })
5577 .await
5578 .unwrap();
5579
5580 let mut fake_server = fake_servers.next().await.unwrap();
5581 let open_notification = fake_server
5582 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5583 .await;
5584
5585 // Edit the buffer, moving the content down
5586 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5587 let change_notification_1 = fake_server
5588 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5589 .await;
5590 assert!(
5591 change_notification_1.text_document.version > open_notification.text_document.version
5592 );
5593
5594 // Report some diagnostics for the initial version of the buffer
5595 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5596 lsp::PublishDiagnosticsParams {
5597 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5598 version: Some(open_notification.text_document.version),
5599 diagnostics: vec![
5600 lsp::Diagnostic {
5601 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5602 severity: Some(DiagnosticSeverity::ERROR),
5603 message: "undefined variable 'A'".to_string(),
5604 source: Some("disk".to_string()),
5605 ..Default::default()
5606 },
5607 lsp::Diagnostic {
5608 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5609 severity: Some(DiagnosticSeverity::ERROR),
5610 message: "undefined variable 'BB'".to_string(),
5611 source: Some("disk".to_string()),
5612 ..Default::default()
5613 },
5614 lsp::Diagnostic {
5615 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5616 severity: Some(DiagnosticSeverity::ERROR),
5617 source: Some("disk".to_string()),
5618 message: "undefined variable 'CCC'".to_string(),
5619 ..Default::default()
5620 },
5621 ],
5622 },
5623 );
5624
5625 // The diagnostics have moved down since they were created.
5626 buffer.next_notification(cx).await;
5627 buffer.read_with(cx, |buffer, _| {
5628 assert_eq!(
5629 buffer
5630 .snapshot()
5631 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5632 .collect::<Vec<_>>(),
5633 &[
5634 DiagnosticEntry {
5635 range: Point::new(3, 9)..Point::new(3, 11),
5636 diagnostic: Diagnostic {
5637 severity: DiagnosticSeverity::ERROR,
5638 message: "undefined variable 'BB'".to_string(),
5639 is_disk_based: true,
5640 group_id: 1,
5641 is_primary: true,
5642 ..Default::default()
5643 },
5644 },
5645 DiagnosticEntry {
5646 range: Point::new(4, 9)..Point::new(4, 12),
5647 diagnostic: Diagnostic {
5648 severity: DiagnosticSeverity::ERROR,
5649 message: "undefined variable 'CCC'".to_string(),
5650 is_disk_based: true,
5651 group_id: 2,
5652 is_primary: true,
5653 ..Default::default()
5654 }
5655 }
5656 ]
5657 );
5658 assert_eq!(
5659 chunks_with_diagnostics(buffer, 0..buffer.len()),
5660 [
5661 ("\n\nfn a() { ".to_string(), None),
5662 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5663 (" }\nfn b() { ".to_string(), None),
5664 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5665 (" }\nfn c() { ".to_string(), None),
5666 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5667 (" }\n".to_string(), None),
5668 ]
5669 );
5670 assert_eq!(
5671 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5672 [
5673 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5674 (" }\nfn c() { ".to_string(), None),
5675 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5676 ]
5677 );
5678 });
5679
5680 // Ensure overlapping diagnostics are highlighted correctly.
5681 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5682 lsp::PublishDiagnosticsParams {
5683 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5684 version: Some(open_notification.text_document.version),
5685 diagnostics: vec![
5686 lsp::Diagnostic {
5687 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5688 severity: Some(DiagnosticSeverity::ERROR),
5689 message: "undefined variable 'A'".to_string(),
5690 source: Some("disk".to_string()),
5691 ..Default::default()
5692 },
5693 lsp::Diagnostic {
5694 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5695 severity: Some(DiagnosticSeverity::WARNING),
5696 message: "unreachable statement".to_string(),
5697 source: Some("disk".to_string()),
5698 ..Default::default()
5699 },
5700 ],
5701 },
5702 );
5703
5704 buffer.next_notification(cx).await;
5705 buffer.read_with(cx, |buffer, _| {
5706 assert_eq!(
5707 buffer
5708 .snapshot()
5709 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5710 .collect::<Vec<_>>(),
5711 &[
5712 DiagnosticEntry {
5713 range: Point::new(2, 9)..Point::new(2, 12),
5714 diagnostic: Diagnostic {
5715 severity: DiagnosticSeverity::WARNING,
5716 message: "unreachable statement".to_string(),
5717 is_disk_based: true,
5718 group_id: 1,
5719 is_primary: true,
5720 ..Default::default()
5721 }
5722 },
5723 DiagnosticEntry {
5724 range: Point::new(2, 9)..Point::new(2, 10),
5725 diagnostic: Diagnostic {
5726 severity: DiagnosticSeverity::ERROR,
5727 message: "undefined variable 'A'".to_string(),
5728 is_disk_based: true,
5729 group_id: 0,
5730 is_primary: true,
5731 ..Default::default()
5732 },
5733 }
5734 ]
5735 );
5736 assert_eq!(
5737 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5738 [
5739 ("fn a() { ".to_string(), None),
5740 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5741 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5742 ("\n".to_string(), None),
5743 ]
5744 );
5745 assert_eq!(
5746 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5747 [
5748 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5749 ("\n".to_string(), None),
5750 ]
5751 );
5752 });
5753
5754 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5755 // changes since the last save.
5756 buffer.update(cx, |buffer, cx| {
5757 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5758 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5759 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5760 });
5761 let change_notification_2 = fake_server
5762 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5763 .await;
5764 assert!(
5765 change_notification_2.text_document.version
5766 > change_notification_1.text_document.version
5767 );
5768
5769 // Handle out-of-order diagnostics
5770 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5771 lsp::PublishDiagnosticsParams {
5772 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5773 version: Some(change_notification_2.text_document.version),
5774 diagnostics: vec![
5775 lsp::Diagnostic {
5776 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5777 severity: Some(DiagnosticSeverity::ERROR),
5778 message: "undefined variable 'BB'".to_string(),
5779 source: Some("disk".to_string()),
5780 ..Default::default()
5781 },
5782 lsp::Diagnostic {
5783 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5784 severity: Some(DiagnosticSeverity::WARNING),
5785 message: "undefined variable 'A'".to_string(),
5786 source: Some("disk".to_string()),
5787 ..Default::default()
5788 },
5789 ],
5790 },
5791 );
5792
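// Verify that the stored diagnostic ranges account for all of the edits made since the file was last saved.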
5793 buffer.next_notification(cx).await;
5794 buffer.read_with(cx, |buffer, _| {
5795 assert_eq!(
5796 buffer
5797 .snapshot()
5798 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5799 .collect::<Vec<_>>(),
5800 &[
5801 DiagnosticEntry {
5802 range: Point::new(2, 21)..Point::new(2, 22),
5803 diagnostic: Diagnostic {
5804 severity: DiagnosticSeverity::WARNING,
5805 message: "undefined variable 'A'".to_string(),
5806 is_disk_based: true,
5807 group_id: 1,
5808 is_primary: true,
5809 ..Default::default()
5810 }
5811 },
5812 DiagnosticEntry {
5813 range: Point::new(3, 9)..Point::new(3, 14),
5814 diagnostic: Diagnostic {
5815 severity: DiagnosticSeverity::ERROR,
5816 message: "undefined variable 'BB'".to_string(),
5817 is_disk_based: true,
5818 group_id: 0,
5819 is_primary: true,
5820 ..Default::default()
5821 },
5822 }
5823 ]
5824 );
5825 });
5826 }
5827
5828 #[gpui::test]
5829 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5830 cx.foreground().forbid_parking();
5831
5832 let text = concat!(
5833 "let one = ;\n", //
5834 "let two = \n",
5835 "let three = 3;\n",
5836 );
5837
5838 let fs = FakeFs::new(cx.background());
5839 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5840
5841 let project = Project::test(fs, cx);
5842 let worktree_id = project
5843 .update(cx, |project, cx| {
5844 project.find_or_create_local_worktree("/dir", true, cx)
5845 })
5846 .await
5847 .unwrap()
5848 .0
5849 .read_with(cx, |tree, _| tree.id());
5850
5851 let buffer = project
5852 .update(cx, |project, cx| {
5853 project.open_buffer((worktree_id, "a.rs"), cx)
5854 })
5855 .await
5856 .unwrap();
5857
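// Insert two diagnostics with empty ranges directly, without going through a language server.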
5858 project.update(cx, |project, cx| {
5859 project
5860 .update_buffer_diagnostics(
5861 &buffer,
5862 vec![
5863 DiagnosticEntry {
5864 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5865 diagnostic: Diagnostic {
5866 severity: DiagnosticSeverity::ERROR,
5867 message: "syntax error 1".to_string(),
5868 ..Default::default()
5869 },
5870 },
5871 DiagnosticEntry {
5872 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5873 diagnostic: Diagnostic {
5874 severity: DiagnosticSeverity::ERROR,
5875 message: "syntax error 2".to_string(),
5876 ..Default::default()
5877 },
5878 },
5879 ],
5880 None,
5881 cx,
5882 )
5883 .unwrap();
5884 });
5885
5886 // An empty range is extended forward to include the following character.
5887 // At the end of a line, an empty range is extended backward to include
5888 // the preceding character.
5889 buffer.read_with(cx, |buffer, _| {
5890 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5891 assert_eq!(
5892 chunks
5893 .iter()
5894 .map(|(s, d)| (s.as_str(), *d))
5895 .collect::<Vec<_>>(),
5896 &[
5897 ("let one = ", None),
5898 (";", Some(DiagnosticSeverity::ERROR)),
5899 ("\nlet two =", None),
5900 (" ", Some(DiagnosticSeverity::ERROR)),
5901 ("\nlet three = 3;\n", None)
5902 ]
5903 );
5904 });
5905 }
5906
5907 #[gpui::test]
5908 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5909 cx.foreground().forbid_parking();
5910
5911 let mut language = Language::new(
5912 LanguageConfig {
5913 name: "Rust".into(),
5914 path_suffixes: vec!["rs".to_string()],
5915 ..Default::default()
5916 },
5917 Some(tree_sitter_rust::language()),
5918 );
5919 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5920
5921 let text = "
5922 fn a() {
5923 f1();
5924 }
5925 fn b() {
5926 f2();
5927 }
5928 fn c() {
5929 f3();
5930 }
5931 "
5932 .unindent();
5933
5934 let fs = FakeFs::new(cx.background());
5935 fs.insert_tree(
5936 "/dir",
5937 json!({
5938 "a.rs": text.clone(),
5939 }),
5940 )
5941 .await;
5942
5943 let project = Project::test(fs, cx);
5944 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5945
5946 let worktree_id = project
5947 .update(cx, |project, cx| {
5948 project.find_or_create_local_worktree("/dir", true, cx)
5949 })
5950 .await
5951 .unwrap()
5952 .0
5953 .read_with(cx, |tree, _| tree.id());
5954
5955 let buffer = project
5956 .update(cx, |project, cx| {
5957 project.open_buffer((worktree_id, "a.rs"), cx)
5958 })
5959 .await
5960 .unwrap();
5961
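// Capture the document version the language server was given when the buffer was opened.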
5962 let mut fake_server = fake_servers.next().await.unwrap();
5963 let lsp_document_version = fake_server
5964 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5965 .await
5966 .text_document
5967 .version;
5968
5969 // Simulate editing the buffer after the language server computes some edits.
5970 buffer.update(cx, |buffer, cx| {
5971 buffer.edit(
5972 [Point::new(0, 0)..Point::new(0, 0)],
5973 "// above first function\n",
5974 cx,
5975 );
5976 buffer.edit(
5977 [Point::new(2, 0)..Point::new(2, 0)],
5978 " // inside first function\n",
5979 cx,
5980 );
5981 buffer.edit(
5982 [Point::new(6, 4)..Point::new(6, 4)],
5983 "// inside second function ",
5984 cx,
5985 );
5986
5987 assert_eq!(
5988 buffer.text(),
5989 "
5990 // above first function
5991 fn a() {
5992 // inside first function
5993 f1();
5994 }
5995 fn b() {
5996 // inside second function f2();
5997 }
5998 fn c() {
5999 f3();
6000 }
6001 "
6002 .unindent()
6003 );
6004 });
6005
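// Request edits that were computed against the older document version captured above.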
6006 let edits = project
6007 .update(cx, |project, cx| {
6008 project.edits_from_lsp(
6009 &buffer,
6010 vec![
6011 // replace body of first function
6012 lsp::TextEdit {
6013 range: lsp::Range::new(
6014 lsp::Position::new(0, 0),
6015 lsp::Position::new(3, 0),
6016 ),
6017 new_text: "
6018 fn a() {
6019 f10();
6020 }
6021 "
6022 .unindent(),
6023 },
6024 // edit inside second function
6025 lsp::TextEdit {
6026 range: lsp::Range::new(
6027 lsp::Position::new(4, 6),
6028 lsp::Position::new(4, 6),
6029 ),
6030 new_text: "00".into(),
6031 },
6032 // edit inside third function via two distinct edits
6033 lsp::TextEdit {
6034 range: lsp::Range::new(
6035 lsp::Position::new(7, 5),
6036 lsp::Position::new(7, 5),
6037 ),
6038 new_text: "4000".into(),
6039 },
6040 lsp::TextEdit {
6041 range: lsp::Range::new(
6042 lsp::Position::new(7, 5),
6043 lsp::Position::new(7, 6),
6044 ),
6045 new_text: "".into(),
6046 },
6047 ],
6048 Some(lsp_document_version),
6049 cx,
6050 )
6051 })
6052 .await
6053 .unwrap();
6054
6055 buffer.update(cx, |buffer, cx| {
6056 for (range, new_text) in edits {
6057 buffer.edit([range], new_text, cx);
6058 }
6059 assert_eq!(
6060 buffer.text(),
6061 "
6062 // above first function
6063 fn a() {
6064 // inside first function
6065 f10();
6066 }
6067 fn b() {
6068 // inside second function f200();
6069 }
6070 fn c() {
6071 f4000();
6072 }
6073 "
6074 .unindent()
6075 );
6076 });
6077 }
6078
6079 #[gpui::test]
6080 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6081 cx.foreground().forbid_parking();
6082
6083 let text = "
6084 use a::b;
6085 use a::c;
6086
6087 fn f() {
6088 b();
6089 c();
6090 }
6091 "
6092 .unindent();
6093
6094 let fs = FakeFs::new(cx.background());
6095 fs.insert_tree(
6096 "/dir",
6097 json!({
6098 "a.rs": text.clone(),
6099 }),
6100 )
6101 .await;
6102
6103 let project = Project::test(fs, cx);
6104 let worktree_id = project
6105 .update(cx, |project, cx| {
6106 project.find_or_create_local_worktree("/dir", true, cx)
6107 })
6108 .await
6109 .unwrap()
6110 .0
6111 .read_with(cx, |tree, _| tree.id());
6112
6113 let buffer = project
6114 .update(cx, |project, cx| {
6115 project.open_buffer((worktree_id, "a.rs"), cx)
6116 })
6117 .await
6118 .unwrap();
6119
6120 // Simulate the language server sending us a small edit in the form of a very large diff.
6121 // Rust-analyzer does this when performing a merge-imports code action.
6122 let edits = project
6123 .update(cx, |project, cx| {
6124 project.edits_from_lsp(
6125 &buffer,
6126 [
6127 // Replace the first use statement without editing the semicolon.
6128 lsp::TextEdit {
6129 range: lsp::Range::new(
6130 lsp::Position::new(0, 4),
6131 lsp::Position::new(0, 8),
6132 ),
6133 new_text: "a::{b, c}".into(),
6134 },
6135 // Reinsert the remainder of the file between the semicolon and the final
6136 // newline of the file.
6137 lsp::TextEdit {
6138 range: lsp::Range::new(
6139 lsp::Position::new(0, 9),
6140 lsp::Position::new(0, 9),
6141 ),
6142 new_text: "\n\n".into(),
6143 },
6144 lsp::TextEdit {
6145 range: lsp::Range::new(
6146 lsp::Position::new(0, 9),
6147 lsp::Position::new(0, 9),
6148 ),
6149 new_text: "
6150 fn f() {
6151 b();
6152 c();
6153 }"
6154 .unindent(),
6155 },
6156 // Delete everything after the first newline of the file.
6157 lsp::TextEdit {
6158 range: lsp::Range::new(
6159 lsp::Position::new(1, 0),
6160 lsp::Position::new(7, 0),
6161 ),
6162 new_text: "".into(),
6163 },
6164 ],
6165 None,
6166 cx,
6167 )
6168 })
6169 .await
6170 .unwrap();
6171
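// Resolve the returned anchor ranges to points. The four LSP edits should have been reduced to two minimal edits.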
6172 buffer.update(cx, |buffer, cx| {
6173 let edits = edits
6174 .into_iter()
6175 .map(|(range, text)| {
6176 (
6177 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6178 text,
6179 )
6180 })
6181 .collect::<Vec<_>>();
6182
6183 assert_eq!(
6184 edits,
6185 [
6186 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6187 (Point::new(1, 0)..Point::new(2, 0), "".into())
6188 ]
6189 );
6190
6191 for (range, new_text) in edits {
6192 buffer.edit([range], new_text, cx);
6193 }
6194 assert_eq!(
6195 buffer.text(),
6196 "
6197 use a::{b, c};
6198
6199 fn f() {
6200 b();
6201 c();
6202 }
6203 "
6204 .unindent()
6205 );
6206 });
6207 }
6208
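// Collects the chunks of a buffer range into (text, severity) pairs, merging adjacent chunks that share the same diagnostic severity.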
6209 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6210 buffer: &Buffer,
6211 range: Range<T>,
6212 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6213 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6214 for chunk in buffer.snapshot().chunks(range, true) {
6215 if chunks.last().map_or(false, |prev_chunk| {
6216 prev_chunk.1 == chunk.diagnostic_severity
6217 }) {
6218 chunks.last_mut().unwrap().0.push_str(chunk.text);
6219 } else {
6220 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6221 }
6222 }
6223 chunks
6224 }
6225
6226 #[gpui::test]
6227 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6228 let dir = temp_tree(json!({
6229 "root": {
6230 "dir1": {},
6231 "dir2": {
6232 "dir3": {}
6233 }
6234 }
6235 }));
6236
6237 let project = Project::test(Arc::new(RealFs), cx);
6238 let (tree, _) = project
6239 .update(cx, |project, cx| {
6240 project.find_or_create_local_worktree(&dir.path(), true, cx)
6241 })
6242 .await
6243 .unwrap();
6244
6245 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6246 .await;
6247
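// Path matching should return no results, since the worktree contains only directories.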
6248 let cancel_flag = Default::default();
6249 let results = project
6250 .read_with(cx, |project, cx| {
6251 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6252 })
6253 .await;
6254
6255 assert!(results.is_empty());
6256 }
6257
6258 #[gpui::test]
6259 async fn test_definition(cx: &mut gpui::TestAppContext) {
6260 let mut language = Language::new(
6261 LanguageConfig {
6262 name: "Rust".into(),
6263 path_suffixes: vec!["rs".to_string()],
6264 ..Default::default()
6265 },
6266 Some(tree_sitter_rust::language()),
6267 );
6268 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6269
6270 let fs = FakeFs::new(cx.background());
6271 fs.insert_tree(
6272 "/dir",
6273 json!({
6274 "a.rs": "const fn a() { A }",
6275 "b.rs": "const y: i32 = crate::a()",
6276 }),
6277 )
6278 .await;
6279
6280 let project = Project::test(fs, cx);
6281 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6282
6283 let (tree, _) = project
6284 .update(cx, |project, cx| {
6285 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6286 })
6287 .await
6288 .unwrap();
6289 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6290 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6291 .await;
6292
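// The worktree points at a single file, so the buffer is opened with an empty relative path.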
6293 let buffer = project
6294 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6295 .await
6296 .unwrap();
6297
6298 let fake_server = fake_servers.next().await.unwrap();
6299 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6300 let params = params.text_document_position_params;
6301 assert_eq!(
6302 params.text_document.uri.to_file_path().unwrap(),
6303 Path::new("/dir/b.rs"),
6304 );
6305 assert_eq!(params.position, lsp::Position::new(0, 22));
6306
6307 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6308 lsp::Location::new(
6309 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6310 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6311 ),
6312 )))
6313 });
6314
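// Requesting a definition in another file should load that file into a new, invisible worktree.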
6315 let mut definitions = project
6316 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6317 .await
6318 .unwrap();
6319
6320 assert_eq!(definitions.len(), 1);
6321 let definition = definitions.pop().unwrap();
6322 cx.update(|cx| {
6323 let target_buffer = definition.buffer.read(cx);
6324 assert_eq!(
6325 target_buffer
6326 .file()
6327 .unwrap()
6328 .as_local()
6329 .unwrap()
6330 .abs_path(cx),
6331 Path::new("/dir/a.rs"),
6332 );
6333 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6334 assert_eq!(
6335 list_worktrees(&project, cx),
6336 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6337 );
6338
6339 drop(definition);
6340 });
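// Once the definition (and thus the target buffer) is dropped, the invisible worktree for a.rs is released.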
6341 cx.read(|cx| {
6342 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6343 });
6344
6345 fn list_worktrees<'a>(
6346 project: &'a ModelHandle<Project>,
6347 cx: &'a AppContext,
6348 ) -> Vec<(&'a Path, bool)> {
6349 project
6350 .read(cx)
6351 .worktrees(cx)
6352 .map(|worktree| {
6353 let worktree = worktree.read(cx);
6354 (
6355 worktree.as_local().unwrap().abs_path().as_ref(),
6356 worktree.is_visible(),
6357 )
6358 })
6359 .collect::<Vec<_>>()
6360 }
6361 }
6362
6363 #[gpui::test(iterations = 10)]
6364 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6365 let mut language = Language::new(
6366 LanguageConfig {
6367 name: "TypeScript".into(),
6368 path_suffixes: vec!["ts".to_string()],
6369 ..Default::default()
6370 },
6371 None,
6372 );
6373 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6374
6375 let fs = FakeFs::new(cx.background());
6376 fs.insert_tree(
6377 "/dir",
6378 json!({
6379 "a.ts": "a",
6380 }),
6381 )
6382 .await;
6383
6384 let project = Project::test(fs, cx);
6385 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6386
6387 let (tree, _) = project
6388 .update(cx, |project, cx| {
6389 project.find_or_create_local_worktree("/dir", true, cx)
6390 })
6391 .await
6392 .unwrap();
6393 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6394 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6395 .await;
6396
6397 let buffer = project
6398 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6399 .await
6400 .unwrap();
6401
6402 let fake_server = fake_language_servers.next().await.unwrap();
6403
6404 // The language server returns code actions that contain commands, not edits.
6405 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6406 fake_server
6407 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6408 Ok(Some(vec![
6409 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6410 title: "The code action".into(),
6411 command: Some(lsp::Command {
6412 title: "The command".into(),
6413 command: "_the/command".into(),
6414 arguments: Some(vec![json!("the-argument")]),
6415 }),
6416 ..Default::default()
6417 }),
6418 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6419 title: "two".into(),
6420 ..Default::default()
6421 }),
6422 ]))
6423 })
6424 .next()
6425 .await;
6426
6427 let action = actions.await.unwrap()[0].clone();
6428 let apply = project.update(cx, |project, cx| {
6429 project.apply_code_action(buffer.clone(), action, true, cx)
6430 });
6431
6432 // Resolving the code action does not populate its edits. In the absence of
6433 // edits, we must execute the given command.
6434 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6435 |action, _| async move { Ok(action) },
6436 );
6437
6438 // While executing the command, the language server sends the editor
6439 // a `workspace/applyEdit` request.
6440 fake_server
6441 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6442 let fake = fake_server.clone();
6443 move |params, _| {
6444 assert_eq!(params.command, "_the/command");
6445 let fake = fake.clone();
6446 async move {
6447 fake.server
6448 .request::<lsp::request::ApplyWorkspaceEdit>(
6449 lsp::ApplyWorkspaceEditParams {
6450 label: None,
6451 edit: lsp::WorkspaceEdit {
6452 changes: Some(
6453 [(
6454 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6455 vec![lsp::TextEdit {
6456 range: lsp::Range::new(
6457 lsp::Position::new(0, 0),
6458 lsp::Position::new(0, 0),
6459 ),
6460 new_text: "X".into(),
6461 }],
6462 )]
6463 .into_iter()
6464 .collect(),
6465 ),
6466 ..Default::default()
6467 },
6468 },
6469 )
6470 .await
6471 .unwrap();
6472 Ok(Some(json!(null)))
6473 }
6474 }
6475 })
6476 .next()
6477 .await;
6478
6479 // Applying the code action returns a project transaction containing the edits
6480 // sent by the language server in its `workspace/applyEdit` request.
6481 let transaction = apply.await.unwrap();
6482 assert!(transaction.0.contains_key(&buffer));
6483 buffer.update(cx, |buffer, cx| {
6484 assert_eq!(buffer.text(), "Xa");
6485 buffer.undo(cx);
6486 assert_eq!(buffer.text(), "a");
6487 });
6488 }
6489
6490 #[gpui::test]
6491 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6492 let fs = FakeFs::new(cx.background());
6493 fs.insert_tree(
6494 "/dir",
6495 json!({
6496 "file1": "the old contents",
6497 }),
6498 )
6499 .await;
6500
6501 let project = Project::test(fs.clone(), cx);
6502 let worktree_id = project
6503 .update(cx, |p, cx| {
6504 p.find_or_create_local_worktree("/dir", true, cx)
6505 })
6506 .await
6507 .unwrap()
6508 .0
6509 .read_with(cx, |tree, _| tree.id());
6510
6511 let buffer = project
6512 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6513 .await
6514 .unwrap();
6515 buffer
6516 .update(cx, |buffer, cx| {
6517 assert_eq!(buffer.text(), "the old contents");
6518 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6519 buffer.save(cx)
6520 })
6521 .await
6522 .unwrap();
6523
6524 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6525 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6526 }
6527
6528 #[gpui::test]
6529 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6530 let fs = FakeFs::new(cx.background());
6531 fs.insert_tree(
6532 "/dir",
6533 json!({
6534 "file1": "the old contents",
6535 }),
6536 )
6537 .await;
6538
6539 let project = Project::test(fs.clone(), cx);
6540 let worktree_id = project
6541 .update(cx, |p, cx| {
6542 p.find_or_create_local_worktree("/dir/file1", true, cx)
6543 })
6544 .await
6545 .unwrap()
6546 .0
6547 .read_with(cx, |tree, _| tree.id());
6548
6549 let buffer = project
6550 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6551 .await
6552 .unwrap();
6553 buffer
6554 .update(cx, |buffer, cx| {
6555 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6556 buffer.save(cx)
6557 })
6558 .await
6559 .unwrap();
6560
6561 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6562 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6563 }
6564
6565 #[gpui::test]
6566 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6567 let fs = FakeFs::new(cx.background());
6568 fs.insert_tree("/dir", json!({})).await;
6569
6570 let project = Project::test(fs.clone(), cx);
6571 let (worktree, _) = project
6572 .update(cx, |project, cx| {
6573 project.find_or_create_local_worktree("/dir", true, cx)
6574 })
6575 .await
6576 .unwrap();
6577 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6578
6579 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6580 buffer.update(cx, |buffer, cx| {
6581 buffer.edit([0..0], "abc", cx);
6582 assert!(buffer.is_dirty());
6583 assert!(!buffer.has_conflict());
6584 });
6585 project
6586 .update(cx, |project, cx| {
6587 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6588 })
6589 .await
6590 .unwrap();
6591 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6592 buffer.read_with(cx, |buffer, cx| {
6593 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6594 assert!(!buffer.is_dirty());
6595 assert!(!buffer.has_conflict());
6596 });
6597
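// Opening the path the buffer was saved to should return the same buffer.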
6598 let opened_buffer = project
6599 .update(cx, |project, cx| {
6600 project.open_buffer((worktree_id, "file1"), cx)
6601 })
6602 .await
6603 .unwrap();
6604 assert_eq!(opened_buffer, buffer);
6605 }
6606
6607 #[gpui::test(retries = 5)]
6608 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6609 let dir = temp_tree(json!({
6610 "a": {
6611 "file1": "",
6612 "file2": "",
6613 "file3": "",
6614 },
6615 "b": {
6616 "c": {
6617 "file4": "",
6618 "file5": "",
6619 }
6620 }
6621 }));
6622
6623 let project = Project::test(Arc::new(RealFs), cx);
6624 let rpc = project.read_with(cx, |p, _| p.client.clone());
6625
6626 let (tree, _) = project
6627 .update(cx, |p, cx| {
6628 p.find_or_create_local_worktree(dir.path(), true, cx)
6629 })
6630 .await
6631 .unwrap();
6632 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6633
6634 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6635 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6636 async move { buffer.await.unwrap() }
6637 };
6638 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6639 tree.read_with(cx, |tree, _| {
6640 tree.entry_for_path(path)
6641 .unwrap_or_else(|| panic!("no entry for path {}", path))
6642 .id
6643 })
6644 };
6645
6646 let buffer2 = buffer_for_path("a/file2", cx).await;
6647 let buffer3 = buffer_for_path("a/file3", cx).await;
6648 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6649 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6650
6651 let file2_id = id_for_path("a/file2", &cx);
6652 let file3_id = id_for_path("a/file3", &cx);
6653 let file4_id = id_for_path("b/c/file4", &cx);
6654
6655 // Wait for the initial scan.
6656 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6657 .await;
6658
6659 // Create a remote copy of this worktree.
6660 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6661 let (remote, load_task) = cx.update(|cx| {
6662 Worktree::remote(
6663 1,
6664 1,
6665 initial_snapshot.to_proto(&Default::default(), true),
6666 rpc.clone(),
6667 cx,
6668 )
6669 });
6670 load_task.await;
6671
6672 cx.read(|cx| {
6673 assert!(!buffer2.read(cx).is_dirty());
6674 assert!(!buffer3.read(cx).is_dirty());
6675 assert!(!buffer4.read(cx).is_dirty());
6676 assert!(!buffer5.read(cx).is_dirty());
6677 });
6678
6679 // Rename and delete files and directories.
6680 tree.flush_fs_events(&cx).await;
6681 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6682 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6683 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6684 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6685 tree.flush_fs_events(&cx).await;
6686
6687 let expected_paths = vec![
6688 "a",
6689 "a/file1",
6690 "a/file2.new",
6691 "b",
6692 "d",
6693 "d/file3",
6694 "d/file4",
6695 ];
6696
6697 cx.read(|app| {
6698 assert_eq!(
6699 tree.read(app)
6700 .paths()
6701 .map(|p| p.to_str().unwrap())
6702 .collect::<Vec<_>>(),
6703 expected_paths
6704 );
6705
6706 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6707 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6708 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6709
6710 assert_eq!(
6711 buffer2.read(app).file().unwrap().path().as_ref(),
6712 Path::new("a/file2.new")
6713 );
6714 assert_eq!(
6715 buffer3.read(app).file().unwrap().path().as_ref(),
6716 Path::new("d/file3")
6717 );
6718 assert_eq!(
6719 buffer4.read(app).file().unwrap().path().as_ref(),
6720 Path::new("d/file4")
6721 );
6722 assert_eq!(
6723 buffer5.read(app).file().unwrap().path().as_ref(),
6724 Path::new("b/c/file5")
6725 );
6726
6727 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6728 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6729 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6730 assert!(buffer5.read(app).file().unwrap().is_deleted());
6731 });
6732
6733 // Update the remote worktree. Check that it becomes consistent with the
6734 // local worktree.
6735 remote.update(cx, |remote, cx| {
6736 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6737 &initial_snapshot,
6738 1,
6739 1,
6740 true,
6741 );
6742 remote
6743 .as_remote_mut()
6744 .unwrap()
6745 .snapshot
6746 .apply_remote_update(update_message)
6747 .unwrap();
6748
6749 assert_eq!(
6750 remote
6751 .paths()
6752 .map(|p| p.to_str().unwrap())
6753 .collect::<Vec<_>>(),
6754 expected_paths
6755 );
6756 });
6757 }
6758
6759 #[gpui::test]
6760 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6761 let fs = FakeFs::new(cx.background());
6762 fs.insert_tree(
6763 "/the-dir",
6764 json!({
6765 "a.txt": "a-contents",
6766 "b.txt": "b-contents",
6767 }),
6768 )
6769 .await;
6770
6771 let project = Project::test(fs.clone(), cx);
6772 let worktree_id = project
6773 .update(cx, |p, cx| {
6774 p.find_or_create_local_worktree("/the-dir", true, cx)
6775 })
6776 .await
6777 .unwrap()
6778 .0
6779 .read_with(cx, |tree, _| tree.id());
6780
6781 // Spawn multiple tasks to open paths, repeating some paths.
6782 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6783 (
6784 p.open_buffer((worktree_id, "a.txt"), cx),
6785 p.open_buffer((worktree_id, "b.txt"), cx),
6786 p.open_buffer((worktree_id, "a.txt"), cx),
6787 )
6788 });
6789
6790 let buffer_a_1 = buffer_a_1.await.unwrap();
6791 let buffer_a_2 = buffer_a_2.await.unwrap();
6792 let buffer_b = buffer_b.await.unwrap();
6793 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6794 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6795
6796 // There is only one buffer per path.
6797 let buffer_a_id = buffer_a_1.id();
6798 assert_eq!(buffer_a_2.id(), buffer_a_id);
6799
6800 // Open the same path again while it is still open.
6801 drop(buffer_a_1);
6802 let buffer_a_3 = project
6803 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6804 .await
6805 .unwrap();
6806
6807 // There's still only one buffer per path.
6808 assert_eq!(buffer_a_3.id(), buffer_a_id);
6809 }
6810
6811 #[gpui::test]
6812 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6813 use std::fs;
6814
6815 let dir = temp_tree(json!({
6816 "file1": "abc",
6817 "file2": "def",
6818 "file3": "ghi",
6819 }));
6820
6821 let project = Project::test(Arc::new(RealFs), cx);
6822 let (worktree, _) = project
6823 .update(cx, |p, cx| {
6824 p.find_or_create_local_worktree(dir.path(), true, cx)
6825 })
6826 .await
6827 .unwrap();
6828 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6829
6830 worktree.flush_fs_events(&cx).await;
6831 worktree
6832 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6833 .await;
6834
6835 let buffer1 = project
6836 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6837 .await
6838 .unwrap();
6839 let events = Rc::new(RefCell::new(Vec::new()));
6840
6841 // initially, the buffer isn't dirty.
6842 buffer1.update(cx, |buffer, cx| {
6843 cx.subscribe(&buffer1, {
6844 let events = events.clone();
6845 move |_, _, event, _| match event {
6846 BufferEvent::Operation(_) => {}
6847 _ => events.borrow_mut().push(event.clone()),
6848 }
6849 })
6850 .detach();
6851
6852 assert!(!buffer.is_dirty());
6853 assert!(events.borrow().is_empty());
6854
6855 buffer.edit(vec![1..2], "", cx);
6856 });
6857
6858 // after the first edit, the buffer is dirty, and emits a dirtied event.
6859 buffer1.update(cx, |buffer, cx| {
6860 assert_eq!(buffer.text(), "ac");
6861 assert!(buffer.is_dirty());
6862 assert_eq!(
6863 *events.borrow(),
6864 &[language::Event::Edited, language::Event::Dirtied]
6865 );
6866 events.borrow_mut().clear();
6867 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6868 });
6869
6870 // after saving, the buffer is not dirty, and emits a saved event.
6871 buffer1.update(cx, |buffer, cx| {
6872 assert!(!buffer.is_dirty());
6873 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6874 events.borrow_mut().clear();
6875
6876 buffer.edit(vec![1..1], "B", cx);
6877 buffer.edit(vec![2..2], "D", cx);
6878 });
6879
6880 // after editing again, the buffer is dirty, and emits another dirty event.
6881 buffer1.update(cx, |buffer, cx| {
6882 assert_eq!(buffer.text(), "aBDc");
6883 assert!(buffer.is_dirty());
6884 assert_eq!(
6885 *events.borrow(),
6886 &[
6887 language::Event::Edited,
6888 language::Event::Dirtied,
6889 language::Event::Edited,
6890 ],
6891 );
6892 events.borrow_mut().clear();
6893
6894 // TODO - currently, after restoring the buffer to its
6895 // previously-saved state, the buffer is still considered dirty.
6896 buffer.edit([1..3], "", cx);
6897 assert_eq!(buffer.text(), "ac");
6898 assert!(buffer.is_dirty());
6899 });
6900
6901 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6902
6903 // When a file is deleted, the buffer is considered dirty.
6904 let events = Rc::new(RefCell::new(Vec::new()));
6905 let buffer2 = project
6906 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6907 .await
6908 .unwrap();
6909 buffer2.update(cx, |_, cx| {
6910 cx.subscribe(&buffer2, {
6911 let events = events.clone();
6912 move |_, _, event, _| events.borrow_mut().push(event.clone())
6913 })
6914 .detach();
6915 });
6916
6917 fs::remove_file(dir.path().join("file2")).unwrap();
6918 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6919 assert_eq!(
6920 *events.borrow(),
6921 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6922 );
6923
6924 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6925 let events = Rc::new(RefCell::new(Vec::new()));
6926 let buffer3 = project
6927 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6928 .await
6929 .unwrap();
6930 buffer3.update(cx, |_, cx| {
6931 cx.subscribe(&buffer3, {
6932 let events = events.clone();
6933 move |_, _, event, _| events.borrow_mut().push(event.clone())
6934 })
6935 .detach();
6936 });
6937
6938 worktree.flush_fs_events(&cx).await;
6939 buffer3.update(cx, |buffer, cx| {
6940 buffer.edit(Some(0..0), "x", cx);
6941 });
6942 events.borrow_mut().clear();
6943 fs::remove_file(dir.path().join("file3")).unwrap();
6944 buffer3
6945 .condition(&cx, |_, _| !events.borrow().is_empty())
6946 .await;
6947 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6948 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6949 }
6950
6951 #[gpui::test]
6952 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6953 use std::fs;
6954
6955 let initial_contents = "aaa\nbbbbb\nc\n";
6956 let dir = temp_tree(json!({ "the-file": initial_contents }));
6957
6958 let project = Project::test(Arc::new(RealFs), cx);
6959 let (worktree, _) = project
6960 .update(cx, |p, cx| {
6961 p.find_or_create_local_worktree(dir.path(), true, cx)
6962 })
6963 .await
6964 .unwrap();
6965 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6966
6967 worktree
6968 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6969 .await;
6970
6971 let abs_path = dir.path().join("the-file");
6972 let buffer = project
6973 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6974 .await
6975 .unwrap();
6976
6977 // TODO
6978 // Add a cursor on each row.
6979 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6980 // assert!(!buffer.is_dirty());
6981 // buffer.add_selection_set(
6982 // &(0..3)
6983 // .map(|row| Selection {
6984 // id: row as usize,
6985 // start: Point::new(row, 1),
6986 // end: Point::new(row, 1),
6987 // reversed: false,
6988 // goal: SelectionGoal::None,
6989 // })
6990 // .collect::<Vec<_>>(),
6991 // cx,
6992 // )
6993 // });
6994
6995 // Change the file on disk, adding two new lines of text, and removing
6996 // one line.
6997 buffer.read_with(cx, |buffer, _| {
6998 assert!(!buffer.is_dirty());
6999 assert!(!buffer.has_conflict());
7000 });
7001 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7002 fs::write(&abs_path, new_contents).unwrap();
7003
7004 // Because the buffer was not modified, it is reloaded from disk. Its
7005 // contents are edited according to the diff between the old and new
7006 // file contents.
7007 buffer
7008 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7009 .await;
7010
7011 buffer.update(cx, |buffer, _| {
7012 assert_eq!(buffer.text(), new_contents);
7013 assert!(!buffer.is_dirty());
7014 assert!(!buffer.has_conflict());
7015
7016 // TODO
7017 // let cursor_positions = buffer
7018 // .selection_set(selection_set_id)
7019 // .unwrap()
7020 // .selections::<Point>(&*buffer)
7021 // .map(|selection| {
7022 // assert_eq!(selection.start, selection.end);
7023 // selection.start
7024 // })
7025 // .collect::<Vec<_>>();
7026 // assert_eq!(
7027 // cursor_positions,
7028 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7029 // );
7030 });
7031
7032 // Modify the buffer
7033 buffer.update(cx, |buffer, cx| {
7034 buffer.edit(vec![0..0], " ", cx);
7035 assert!(buffer.is_dirty());
7036 assert!(!buffer.has_conflict());
7037 });
7038
7039 // Change the file on disk again, adding blank lines to the beginning.
7040 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7041
7042 // Because the buffer is modified, it doesn't reload from disk, but is
7043 // marked as having a conflict.
7044 buffer
7045 .condition(&cx, |buffer, _| buffer.has_conflict())
7046 .await;
7047 }
7048
7049 #[gpui::test]
7050 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7051 cx.foreground().forbid_parking();
7052
7053 let fs = FakeFs::new(cx.background());
7054 fs.insert_tree(
7055 "/the-dir",
7056 json!({
7057 "a.rs": "
7058 fn foo(mut v: Vec<usize>) {
7059 for x in &v {
7060 v.push(1);
7061 }
7062 }
7063 "
7064 .unindent(),
7065 }),
7066 )
7067 .await;
7068
7069 let project = Project::test(fs.clone(), cx);
7070 let (worktree, _) = project
7071 .update(cx, |p, cx| {
7072 p.find_or_create_local_worktree("/the-dir", true, cx)
7073 })
7074 .await
7075 .unwrap();
7076 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7077
7078 let buffer = project
7079 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7080 .await
7081 .unwrap();
7082
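// Build a set of diagnostics in which primary diagnostics and their hints reference each other via related information.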
7083 let buffer_uri = lsp::Url::from_file_path("/the-dir/a.rs").unwrap();
7084 let message = lsp::PublishDiagnosticsParams {
7085 uri: buffer_uri.clone(),
7086 diagnostics: vec![
7087 lsp::Diagnostic {
7088 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7089 severity: Some(DiagnosticSeverity::WARNING),
7090 message: "error 1".to_string(),
7091 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7092 location: lsp::Location {
7093 uri: buffer_uri.clone(),
7094 range: lsp::Range::new(
7095 lsp::Position::new(1, 8),
7096 lsp::Position::new(1, 9),
7097 ),
7098 },
7099 message: "error 1 hint 1".to_string(),
7100 }]),
7101 ..Default::default()
7102 },
7103 lsp::Diagnostic {
7104 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7105 severity: Some(DiagnosticSeverity::HINT),
7106 message: "error 1 hint 1".to_string(),
7107 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7108 location: lsp::Location {
7109 uri: buffer_uri.clone(),
7110 range: lsp::Range::new(
7111 lsp::Position::new(1, 8),
7112 lsp::Position::new(1, 9),
7113 ),
7114 },
7115 message: "original diagnostic".to_string(),
7116 }]),
7117 ..Default::default()
7118 },
7119 lsp::Diagnostic {
7120 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7121 severity: Some(DiagnosticSeverity::ERROR),
7122 message: "error 2".to_string(),
7123 related_information: Some(vec![
7124 lsp::DiagnosticRelatedInformation {
7125 location: lsp::Location {
7126 uri: buffer_uri.clone(),
7127 range: lsp::Range::new(
7128 lsp::Position::new(1, 13),
7129 lsp::Position::new(1, 15),
7130 ),
7131 },
7132 message: "error 2 hint 1".to_string(),
7133 },
7134 lsp::DiagnosticRelatedInformation {
7135 location: lsp::Location {
7136 uri: buffer_uri.clone(),
7137 range: lsp::Range::new(
7138 lsp::Position::new(1, 13),
7139 lsp::Position::new(1, 15),
7140 ),
7141 },
7142 message: "error 2 hint 2".to_string(),
7143 },
7144 ]),
7145 ..Default::default()
7146 },
7147 lsp::Diagnostic {
7148 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7149 severity: Some(DiagnosticSeverity::HINT),
7150 message: "error 2 hint 1".to_string(),
7151 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7152 location: lsp::Location {
7153 uri: buffer_uri.clone(),
7154 range: lsp::Range::new(
7155 lsp::Position::new(2, 8),
7156 lsp::Position::new(2, 17),
7157 ),
7158 },
7159 message: "original diagnostic".to_string(),
7160 }]),
7161 ..Default::default()
7162 },
7163 lsp::Diagnostic {
7164 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7165 severity: Some(DiagnosticSeverity::HINT),
7166 message: "error 2 hint 2".to_string(),
7167 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7168 location: lsp::Location {
7169 uri: buffer_uri.clone(),
7170 range: lsp::Range::new(
7171 lsp::Position::new(2, 8),
7172 lsp::Position::new(2, 17),
7173 ),
7174 },
7175 message: "original diagnostic".to_string(),
7176 }]),
7177 ..Default::default()
7178 },
7179 ],
7180 version: None,
7181 };
7182
7183 project
7184 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7185 .unwrap();
7186 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7187
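// Diagnostics connected by related information should share a group id, with the original diagnostic marked as primary.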
7188 assert_eq!(
7189 buffer
7190 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7191 .collect::<Vec<_>>(),
7192 &[
7193 DiagnosticEntry {
7194 range: Point::new(1, 8)..Point::new(1, 9),
7195 diagnostic: Diagnostic {
7196 severity: DiagnosticSeverity::WARNING,
7197 message: "error 1".to_string(),
7198 group_id: 0,
7199 is_primary: true,
7200 ..Default::default()
7201 }
7202 },
7203 DiagnosticEntry {
7204 range: Point::new(1, 8)..Point::new(1, 9),
7205 diagnostic: Diagnostic {
7206 severity: DiagnosticSeverity::HINT,
7207 message: "error 1 hint 1".to_string(),
7208 group_id: 0,
7209 is_primary: false,
7210 ..Default::default()
7211 }
7212 },
7213 DiagnosticEntry {
7214 range: Point::new(1, 13)..Point::new(1, 15),
7215 diagnostic: Diagnostic {
7216 severity: DiagnosticSeverity::HINT,
7217 message: "error 2 hint 1".to_string(),
7218 group_id: 1,
7219 is_primary: false,
7220 ..Default::default()
7221 }
7222 },
7223 DiagnosticEntry {
7224 range: Point::new(1, 13)..Point::new(1, 15),
7225 diagnostic: Diagnostic {
7226 severity: DiagnosticSeverity::HINT,
7227 message: "error 2 hint 2".to_string(),
7228 group_id: 1,
7229 is_primary: false,
7230 ..Default::default()
7231 }
7232 },
7233 DiagnosticEntry {
7234 range: Point::new(2, 8)..Point::new(2, 17),
7235 diagnostic: Diagnostic {
7236 severity: DiagnosticSeverity::ERROR,
7237 message: "error 2".to_string(),
7238 group_id: 1,
7239 is_primary: true,
7240 ..Default::default()
7241 }
7242 }
7243 ]
7244 );
7245
7246 assert_eq!(
7247 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7248 &[
7249 DiagnosticEntry {
7250 range: Point::new(1, 8)..Point::new(1, 9),
7251 diagnostic: Diagnostic {
7252 severity: DiagnosticSeverity::WARNING,
7253 message: "error 1".to_string(),
7254 group_id: 0,
7255 is_primary: true,
7256 ..Default::default()
7257 }
7258 },
7259 DiagnosticEntry {
7260 range: Point::new(1, 8)..Point::new(1, 9),
7261 diagnostic: Diagnostic {
7262 severity: DiagnosticSeverity::HINT,
7263 message: "error 1 hint 1".to_string(),
7264 group_id: 0,
7265 is_primary: false,
7266 ..Default::default()
7267 }
7268 },
7269 ]
7270 );
7271 assert_eq!(
7272 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7273 &[
7274 DiagnosticEntry {
7275 range: Point::new(1, 13)..Point::new(1, 15),
7276 diagnostic: Diagnostic {
7277 severity: DiagnosticSeverity::HINT,
7278 message: "error 2 hint 1".to_string(),
7279 group_id: 1,
7280 is_primary: false,
7281 ..Default::default()
7282 }
7283 },
7284 DiagnosticEntry {
7285 range: Point::new(1, 13)..Point::new(1, 15),
7286 diagnostic: Diagnostic {
7287 severity: DiagnosticSeverity::HINT,
7288 message: "error 2 hint 2".to_string(),
7289 group_id: 1,
7290 is_primary: false,
7291 ..Default::default()
7292 }
7293 },
7294 DiagnosticEntry {
7295 range: Point::new(2, 8)..Point::new(2, 17),
7296 diagnostic: Diagnostic {
7297 severity: DiagnosticSeverity::ERROR,
7298 message: "error 2".to_string(),
7299 group_id: 1,
7300 is_primary: true,
7301 ..Default::default()
7302 }
7303 }
7304 ]
7305 );
7306 }
7307
7308 #[gpui::test]
7309 async fn test_rename(cx: &mut gpui::TestAppContext) {
7310 cx.foreground().forbid_parking();
7311
7312 let mut language = Language::new(
7313 LanguageConfig {
7314 name: "Rust".into(),
7315 path_suffixes: vec!["rs".to_string()],
7316 ..Default::default()
7317 },
7318 Some(tree_sitter_rust::language()),
7319 );
7320 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7321
7322 let fs = FakeFs::new(cx.background());
7323 fs.insert_tree(
7324 "/dir",
7325 json!({
7326 "one.rs": "const ONE: usize = 1;",
7327 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7328 }),
7329 )
7330 .await;
7331
7332 let project = Project::test(fs.clone(), cx);
7333 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7334
7335 let (tree, _) = project
7336 .update(cx, |project, cx| {
7337 project.find_or_create_local_worktree("/dir", true, cx)
7338 })
7339 .await
7340 .unwrap();
7341 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7342 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7343 .await;
7344
7345 let buffer = project
7346 .update(cx, |project, cx| {
7347 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7348 })
7349 .await
7350 .unwrap();
7351
7352 let fake_server = fake_servers.next().await.unwrap();
7353
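// Prepare the rename: the server reports the range of the symbol under the cursor.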
7354 let response = project.update(cx, |project, cx| {
7355 project.prepare_rename(buffer.clone(), 7, cx)
7356 });
7357 fake_server
7358 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7359 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7360 assert_eq!(params.position, lsp::Position::new(0, 7));
7361 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7362 lsp::Position::new(0, 6),
7363 lsp::Position::new(0, 9),
7364 ))))
7365 })
7366 .next()
7367 .await
7368 .unwrap();
7369 let range = response.await.unwrap().unwrap();
7370 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7371 assert_eq!(range, 6..9);
7372
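// Perform the rename: the server's workspace edit spans both files, and both buffers end up in the returned transaction.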
7373 let response = project.update(cx, |project, cx| {
7374 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7375 });
7376 fake_server
7377 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7378 assert_eq!(
7379 params.text_document_position.text_document.uri.as_str(),
7380 "file:///dir/one.rs"
7381 );
7382 assert_eq!(
7383 params.text_document_position.position,
7384 lsp::Position::new(0, 7)
7385 );
7386 assert_eq!(params.new_name, "THREE");
7387 Ok(Some(lsp::WorkspaceEdit {
7388 changes: Some(
7389 [
7390 (
7391 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7392 vec![lsp::TextEdit::new(
7393 lsp::Range::new(
7394 lsp::Position::new(0, 6),
7395 lsp::Position::new(0, 9),
7396 ),
7397 "THREE".to_string(),
7398 )],
7399 ),
7400 (
7401 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7402 vec![
7403 lsp::TextEdit::new(
7404 lsp::Range::new(
7405 lsp::Position::new(0, 24),
7406 lsp::Position::new(0, 27),
7407 ),
7408 "THREE".to_string(),
7409 ),
7410 lsp::TextEdit::new(
7411 lsp::Range::new(
7412 lsp::Position::new(0, 35),
7413 lsp::Position::new(0, 38),
7414 ),
7415 "THREE".to_string(),
7416 ),
7417 ],
7418 ),
7419 ]
7420 .into_iter()
7421 .collect(),
7422 ),
7423 ..Default::default()
7424 }))
7425 })
7426 .next()
7427 .await
7428 .unwrap();
7429 let mut transaction = response.await.unwrap().0;
7430 assert_eq!(transaction.len(), 2);
7431 assert_eq!(
7432 transaction
7433 .remove_entry(&buffer)
7434 .unwrap()
7435 .0
7436 .read_with(cx, |buffer, _| buffer.text()),
7437 "const THREE: usize = 1;"
7438 );
7439 assert_eq!(
7440 transaction
7441 .into_keys()
7442 .next()
7443 .unwrap()
7444 .read_with(cx, |buffer, _| buffer.text()),
7445 "const TWO: usize = one::THREE + one::THREE;"
7446 );
7447 }
7448
7449 #[gpui::test]
7450 async fn test_search(cx: &mut gpui::TestAppContext) {
7451 let fs = FakeFs::new(cx.background());
7452 fs.insert_tree(
7453 "/dir",
7454 json!({
7455 "one.rs": "const ONE: usize = 1;",
7456 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7457 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7458 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7459 }),
7460 )
7461 .await;
7462 let project = Project::test(fs.clone(), cx);
7463 let (tree, _) = project
7464 .update(cx, |project, cx| {
7465 project.find_or_create_local_worktree("/dir", true, cx)
7466 })
7467 .await
7468 .unwrap();
7469 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7470 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7471 .await;
7472
7473 assert_eq!(
7474 search(&project, SearchQuery::text("TWO", false, true), cx)
7475 .await
7476 .unwrap(),
7477 HashMap::from_iter([
7478 ("two.rs".to_string(), vec![6..9]),
7479 ("three.rs".to_string(), vec![37..40])
7480 ])
7481 );
7482
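// Edit an open buffer; subsequent searches should reflect its unsaved in-memory contents.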
7483 let buffer_4 = project
7484 .update(cx, |project, cx| {
7485 project.open_buffer((worktree_id, "four.rs"), cx)
7486 })
7487 .await
7488 .unwrap();
7489 buffer_4.update(cx, |buffer, cx| {
7490 buffer.edit([20..28, 31..43], "two::TWO", cx);
7491 });
7492
7493 assert_eq!(
7494 search(&project, SearchQuery::text("TWO", false, true), cx)
7495 .await
7496 .unwrap(),
7497 HashMap::from_iter([
7498 ("two.rs".to_string(), vec![6..9]),
7499 ("three.rs".to_string(), vec![37..40]),
7500 ("four.rs".to_string(), vec![25..28, 36..39])
7501 ])
7502 );
7503
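// Runs a project-wide search and returns the matches as a map from file path to byte ranges.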
7504 async fn search(
7505 project: &ModelHandle<Project>,
7506 query: SearchQuery,
7507 cx: &mut gpui::TestAppContext,
7508 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7509 let results = project
7510 .update(cx, |project, cx| project.search(query, cx))
7511 .await?;
7512
7513 Ok(results
7514 .into_iter()
7515 .map(|(buffer, ranges)| {
7516 buffer.read_with(cx, |buffer, _| {
7517 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7518 let ranges = ranges
7519 .into_iter()
7520 .map(|range| range.to_offset(buffer))
7521 .collect::<Vec<_>>();
7522 (path, ranges)
7523 })
7524 })
7525 .collect())
7526 }
7527 }
7528}