pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

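/// Root model of this crate: owns the project's worktrees, open buffers,
/// running language servers, and collaboration state, for a project that is
/// either local or joined remotely over RPC.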
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

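/// An open buffer is held strongly while the project is remote or shared and
/// weakly otherwise; `Loading` accumulates operations that arrive before the
/// buffer itself has finished opening.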
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

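/// Distinguishes a project that lives on this machine (and may be shared with
/// collaborators) from one that was joined remotely as a guest.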
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}

pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

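// Counts primary diagnostics by severity; `to_proto` converts the tallies into
// the wire representation used for a single path.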
impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }

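    // Registers this local project with the server, obtaining a remote id and
    // registering each local worktree under it. Called whenever the client
    // (re)connects; `unregister` undoes it on disconnect.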
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

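    // Sharing upgrades weak buffer and worktree handles to strong ones so they
    // stay alive for guests, then asks the server to mark the project shared
    // and shares each worktree.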
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
        let rpc = self.client.clone();

        if let ProjectClientState::Local {
            is_shared,
            remote_id_rx,
            ..
        } = &mut self.client_state
        {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            if let Some(project_id) = *remote_id_rx.borrow() {
                rpc.send(proto::UnshareProject { project_id }).log_err();
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

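    // Opens the buffer at the given project path, reusing an already-open
    // buffer or an in-flight load when possible; otherwise the load goes
    // through the local worktree, or over RPC for remote projects.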
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

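    // Tells the buffer's language server about a newly registered buffer:
    // sends `textDocument/didOpen`, seeds completion triggers, and records the
    // snapshot that later edits will be diffed against.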
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();
                let language_server = self.language_server_for_buffer(buffer, cx).cloned();

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some((_, server)) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    Default::default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = &buffer.read(cx);
        if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
            language_server
                .notify::<lsp::notification::DidCloseTextDocument>(
                    lsp::DidCloseTextDocumentParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(old_path).unwrap(),
                        ),
                    },
                )
                .log_err();
        }
        self.buffer_snapshots.remove(&buffer.remote_id());
    }

    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
        self.language_servers.iter().filter_map(
            move |((language_server_worktree_id, _), server)| {
                if *language_server_worktree_id == worktree_id {
                    Some(server)
                } else {
                    None
                }
            },
        )
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }

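    // Starts (at most once per worktree and adapter) the language server for
    // the given language, wiring up diagnostics, configuration, workspace-edit,
    // and progress callbacks before announcing already-open buffers to it.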
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter, language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }

    pub fn restart_language_servers_for_buffers(
        &mut self,
        buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
            .into_iter()
            .filter_map(|buffer| {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree = file.worktree.read(cx).as_local()?;
                let worktree_id = worktree.id();
                let worktree_abs_path = worktree.abs_path().clone();
                let full_path = file.full_path(cx);
                Some((worktree_id, worktree_abs_path, full_path))
            })
            .collect();
        for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
            let language = self.languages.select_language(&full_path)?;
            self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
        }

        None
    }

    fn restart_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        let server_to_shutdown = self.language_servers.remove(&key);
        self.started_language_servers.remove(&key);
        server_to_shutdown
            .as_ref()
            .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
        cx.spawn_weak(|this, mut cx| async move {
            if let Some(this) = this.upgrade(&cx) {
                if let Some((_, server_to_shutdown)) = server_to_shutdown {
                    if let Some(shutdown_task) = server_to_shutdown.shutdown() {
                        shutdown_task.await;
                    }
                }

                this.update(&mut cx, |this, cx| {
                    this.start_language_server(worktree_id, worktree_path, language, cx);
                });
            }
        })
        .detach();
    }

    fn on_lsp_diagnostics_published(
        &mut self,
        server_id: usize,
        mut params: lsp::PublishDiagnosticsParams,
        adapter: &Arc<dyn LspAdapter>,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        adapter.process_diagnostics(&mut params);
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_started(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                    proto::LspDiskBasedDiagnosticsUpdating {},
                ),
            );
        }
        self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
            .log_err();
        if disk_based_diagnostics_progress_token.is_none() {
            self.disk_based_diagnostics_finished(cx);
            self.broadcast_language_server_update(
                server_id,
                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                    proto::LspDiskBasedDiagnosticsUpdated {},
                ),
            );
        }
    }

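    // Routes `$/progress` notifications: the adapter's disk-based-diagnostics
    // token toggles the started/finished diagnostics events, while any other
    // token updates that server's pending work entries.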
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };

        match progress.value {
            lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                lsp::WorkDoneProgress::Begin(_) => {
                    let language_server_status =
                        if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                            status
                        } else {
                            return;
                        };

                    if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                        language_server_status.pending_diagnostic_updates += 1;
                        if language_server_status.pending_diagnostic_updates == 1 {
                            self.disk_based_diagnostics_started(cx);
                            self.broadcast_language_server_update(
                                server_id,
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                    proto::LspDiskBasedDiagnosticsUpdating {},
                                ),
                            );
                        }
                    } else {
                        self.on_lsp_work_start(server_id, token.clone(), cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkStart(
                                proto::LspWorkStart { token },
                            ),
                        );
                    }
                }
                lsp::WorkDoneProgress::Report(report) => {
                    if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                        self.on_lsp_work_progress(
                            server_id,
                            token.clone(),
                            LanguageServerProgress {
                                message: report.message.clone(),
                                percentage: report.percentage.map(|p| p as usize),
                                last_update_at: Instant::now(),
                            },
                            cx,
                        );
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkProgress(
                                proto::LspWorkProgress {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        );
                    }
                }
                lsp::WorkDoneProgress::End(_) => {
                    if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                        let language_server_status = if let Some(status) =
                            self.language_server_statuses.get_mut(&server_id)
                        {
                            status
                        } else {
                            return;
                        };

                        language_server_status.pending_diagnostic_updates -= 1;
                        if language_server_status.pending_diagnostic_updates == 0 {
                            self.disk_based_diagnostics_finished(cx);
                            self.broadcast_language_server_update(
                                server_id,
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                    proto::LspDiskBasedDiagnosticsUpdated {},
                                ),
                            );
                        }
                    } else {
                        self.on_lsp_work_end(server_id, token.clone(), cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                                token,
                            }),
                        );
                    }
                }
            },
        }
    }

    fn on_lsp_work_start(
        &mut self,
        language_server_id: usize,
        token: String,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.insert(
                token,
                LanguageServerProgress {
                    message: None,
                    percentage: None,
                    last_update_at: Instant::now(),
                },
            );
            cx.notify();
        }
    }

    fn on_lsp_work_progress(
        &mut self,
        language_server_id: usize,
        token: String,
        progress: LanguageServerProgress,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.insert(token, progress);
            cx.notify();
        }
    }

    fn on_lsp_work_end(
        &mut self,
        language_server_id: usize,
        token: String,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.remove(&token);
            cx.notify();
        }
    }

    async fn on_lsp_workspace_edit(
        this: WeakModelHandle<Self>,
        params: lsp::ApplyWorkspaceEditParams,
        server_id: usize,
        adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        mut cx: AsyncAppContext,
    ) -> Result<lsp::ApplyWorkspaceEditResponse> {
        let this = this
            .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
        let transaction = Self::deserialize_workspace_edit(
            this.clone(),
            params.edit,
            true,
            adapter.clone(),
            language_server.clone(),
            &mut cx,
        )
        .await
        .log_err();
        this.update(&mut cx, |this, _| {
            if let Some(transaction) = transaction {
                this.last_workspace_edits_by_language_server
                    .insert(server_id, transaction);
            }
        });
        Ok(lsp::ApplyWorkspaceEditResponse {
            applied: true,
            failed_change: None,
            failure_reason: None,
        })
    }

    fn broadcast_language_server_update(
        &self,
        language_server_id: usize,
        event: proto::update_language_server::Variant,
    ) {
        if let Some(project_id) = self.remote_id() {
            self.client
                .send(proto::UpdateLanguageServer {
                    project_id,
                    language_server_id: language_server_id as u64,
                    variant: Some(event),
                })
                .log_err();
        }
    }

    pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
        for (_, server) in self.language_servers.values() {
            server
                .notify::<lsp::notification::DidChangeConfiguration>(
                    lsp::DidChangeConfigurationParams {
                        settings: settings.clone(),
                    },
                )
                .ok();
        }
        *self.language_server_settings.lock() = settings;
    }

    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }

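    // Converts an LSP `publishDiagnostics` payload into grouped diagnostic
    // entries: related information becomes secondary entries in the same group,
    // and supporting entries can later override severity and the unnecessary flag.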
1836 pub fn update_diagnostics(
1837 &mut self,
1838 params: lsp::PublishDiagnosticsParams,
1839 disk_based_sources: &[&str],
1840 cx: &mut ModelContext<Self>,
1841 ) -> Result<()> {
1842 let abs_path = params
1843 .uri
1844 .to_file_path()
1845 .map_err(|_| anyhow!("URI is not a file"))?;
1846 let mut next_group_id = 0;
1847 let mut diagnostics = Vec::default();
1848 let mut primary_diagnostic_group_ids = HashMap::default();
1849 let mut sources_by_group_id = HashMap::default();
1850 let mut supporting_diagnostics = HashMap::default();
1851 for diagnostic in ¶ms.diagnostics {
1852 let source = diagnostic.source.as_ref();
1853 let code = diagnostic.code.as_ref().map(|code| match code {
1854 lsp::NumberOrString::Number(code) => code.to_string(),
1855 lsp::NumberOrString::String(code) => code.clone(),
1856 });
1857 let range = range_from_lsp(diagnostic.range);
1858 let is_supporting = diagnostic
1859 .related_information
1860 .as_ref()
1861 .map_or(false, |infos| {
1862 infos.iter().any(|info| {
1863 primary_diagnostic_group_ids.contains_key(&(
1864 source,
1865 code.clone(),
1866 range_from_lsp(info.location.range),
1867 ))
1868 })
1869 });
1870
1871 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1872 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1873 });
1874
1875 if is_supporting {
1876 supporting_diagnostics.insert(
1877 (source, code.clone(), range),
1878 (diagnostic.severity, is_unnecessary),
1879 );
1880 } else {
1881 let group_id = post_inc(&mut next_group_id);
1882 let is_disk_based = source.map_or(false, |source| {
1883 disk_based_sources.contains(&source.as_str())
1884 });
1885
1886 sources_by_group_id.insert(group_id, source);
1887 primary_diagnostic_group_ids
1888 .insert((source, code.clone(), range.clone()), group_id);
1889
1890 diagnostics.push(DiagnosticEntry {
1891 range,
1892 diagnostic: Diagnostic {
1893 code: code.clone(),
1894 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1895 message: diagnostic.message.clone(),
1896 group_id,
1897 is_primary: true,
1898 is_valid: true,
1899 is_disk_based,
1900 is_unnecessary,
1901 },
1902 });
1903 if let Some(infos) = &diagnostic.related_information {
1904 for info in infos {
1905 if info.location.uri == params.uri && !info.message.is_empty() {
1906 let range = range_from_lsp(info.location.range);
1907 diagnostics.push(DiagnosticEntry {
1908 range,
1909 diagnostic: Diagnostic {
1910 code: code.clone(),
1911 severity: DiagnosticSeverity::INFORMATION,
1912 message: info.message.clone(),
1913 group_id,
1914 is_primary: false,
1915 is_valid: true,
1916 is_disk_based,
1917 is_unnecessary: false,
1918 },
1919 });
1920 }
1921 }
1922 }
1923 }
1924 }
1925
1926 for entry in &mut diagnostics {
1927 let diagnostic = &mut entry.diagnostic;
1928 if !diagnostic.is_primary {
1929 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1930 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1931 source,
1932 diagnostic.code.clone(),
1933 entry.range.clone(),
1934 )) {
1935 if let Some(severity) = severity {
1936 diagnostic.severity = severity;
1937 }
1938 diagnostic.is_unnecessary = is_unnecessary;
1939 }
1940 }
1941 }
1942
1943 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1944 Ok(())
1945 }
1946
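// Route converted diagnostics to the local worktree that owns `abs_path`, refreshing any
// matching open buffer along the way and notifying observers.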
1947 pub fn update_diagnostic_entries(
1948 &mut self,
1949 abs_path: PathBuf,
1950 version: Option<i32>,
1951 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1952 cx: &mut ModelContext<Project>,
1953 ) -> Result<(), anyhow::Error> {
1954 let (worktree, relative_path) = self
1955 .find_local_worktree(&abs_path, cx)
1956 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1957 if !worktree.read(cx).is_visible() {
1958 return Ok(());
1959 }
1960
1961 let project_path = ProjectPath {
1962 worktree_id: worktree.read(cx).id(),
1963 path: relative_path.into(),
1964 };
1965
1966 for buffer in self.opened_buffers.values() {
1967 if let Some(buffer) = buffer.upgrade(cx) {
1968 if buffer
1969 .read(cx)
1970 .file()
1971 .map_or(false, |file| *file.path() == project_path.path)
1972 {
1973 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1974 break;
1975 }
1976 }
1977 }
1978 worktree.update(cx, |worktree, cx| {
1979 worktree
1980 .as_local_mut()
1981 .ok_or_else(|| anyhow!("not a local worktree"))?
1982 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1983 })?;
1984 cx.emit(Event::DiagnosticsUpdated(project_path));
1985 Ok(())
1986 }
1987
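// Apply diagnostics to a single open buffer, translating disk-based ranges through any
// unsaved edits and clipping them to valid positions in the current snapshot.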
1988 fn update_buffer_diagnostics(
1989 &mut self,
1990 buffer: &ModelHandle<Buffer>,
1991 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1992 version: Option<i32>,
1993 cx: &mut ModelContext<Self>,
1994 ) -> Result<()> {
1995 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1996 Ordering::Equal
1997 .then_with(|| b.is_primary.cmp(&a.is_primary))
1998 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1999 .then_with(|| a.severity.cmp(&b.severity))
2000 .then_with(|| a.message.cmp(&b.message))
2001 }
2002
2003 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2004
2005 diagnostics.sort_unstable_by(|a, b| {
2006 Ordering::Equal
2007 .then_with(|| a.range.start.cmp(&b.range.start))
2008 .then_with(|| b.range.end.cmp(&a.range.end))
2009 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2010 });
2011
2012 let mut sanitized_diagnostics = Vec::new();
2013 let edits_since_save = Patch::new(
2014 snapshot
2015 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2016 .collect(),
2017 );
2018 for entry in diagnostics {
2019 let start;
2020 let end;
2021 if entry.diagnostic.is_disk_based {
2022 // Some diagnostics are based on files on disk instead of buffers'
2023 // current contents. Adjust these diagnostics' ranges to reflect
2024 // any unsaved edits.
2025 start = edits_since_save.old_to_new(entry.range.start);
2026 end = edits_since_save.old_to_new(entry.range.end);
2027 } else {
2028 start = entry.range.start;
2029 end = entry.range.end;
2030 }
2031
2032 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2033 ..snapshot.clip_point_utf16(end, Bias::Right);
2034
2035 // Expand empty ranges by one character
2036 if range.start == range.end {
2037 range.end.column += 1;
2038 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2039 if range.start == range.end && range.end.column > 0 {
2040 range.start.column -= 1;
2041 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2042 }
2043 }
2044
2045 sanitized_diagnostics.push(DiagnosticEntry {
2046 range,
2047 diagnostic: entry.diagnostic,
2048 });
2049 }
2050 drop(edits_since_save);
2051
2052 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2053 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2054 Ok(())
2055 }
2056
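// Reload dirty buffers from disk. Local buffers are reloaded directly; buffers belonging
// to a remote project are reloaded by the host via a ReloadBuffers request.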
2057 pub fn reload_buffers(
2058 &self,
2059 buffers: HashSet<ModelHandle<Buffer>>,
2060 push_to_history: bool,
2061 cx: &mut ModelContext<Self>,
2062 ) -> Task<Result<ProjectTransaction>> {
2063 let mut local_buffers = Vec::new();
2064 let mut remote_buffers = None;
2065 for buffer_handle in buffers {
2066 let buffer = buffer_handle.read(cx);
2067 if buffer.is_dirty() {
2068 if let Some(file) = File::from_dyn(buffer.file()) {
2069 if file.is_local() {
2070 local_buffers.push(buffer_handle);
2071 } else {
2072 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2073 }
2074 }
2075 }
2076 }
2077
2078 let remote_buffers = self.remote_id().zip(remote_buffers);
2079 let client = self.client.clone();
2080
2081 cx.spawn(|this, mut cx| async move {
2082 let mut project_transaction = ProjectTransaction::default();
2083
2084 if let Some((project_id, remote_buffers)) = remote_buffers {
2085 let response = client
2086 .request(proto::ReloadBuffers {
2087 project_id,
2088 buffer_ids: remote_buffers
2089 .iter()
2090 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2091 .collect(),
2092 })
2093 .await?
2094 .transaction
2095 .ok_or_else(|| anyhow!("missing transaction"))?;
2096 project_transaction = this
2097 .update(&mut cx, |this, cx| {
2098 this.deserialize_project_transaction(response, push_to_history, cx)
2099 })
2100 .await?;
2101 }
2102
2103 for buffer in local_buffers {
2104 let transaction = buffer
2105 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2106 .await?;
2107 buffer.update(&mut cx, |buffer, cx| {
2108 if let Some(transaction) = transaction {
2109 if !push_to_history {
2110 buffer.forget_transaction(transaction.id);
2111 }
2112 project_transaction.0.insert(cx.handle(), transaction);
2113 }
2114 });
2115 }
2116
2117 Ok(project_transaction)
2118 })
2119 }
2120
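// Format a set of buffers. Local buffers go through the language server's document (or
// range) formatting request; remote buffers are formatted by the host via a FormatBuffers
// request. All resulting edits are gathered into a single ProjectTransaction.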
2121 pub fn format(
2122 &self,
2123 buffers: HashSet<ModelHandle<Buffer>>,
2124 push_to_history: bool,
2125 cx: &mut ModelContext<Project>,
2126 ) -> Task<Result<ProjectTransaction>> {
2127 let mut local_buffers = Vec::new();
2128 let mut remote_buffers = None;
2129 for buffer_handle in buffers {
2130 let buffer = buffer_handle.read(cx);
2131 if let Some(file) = File::from_dyn(buffer.file()) {
2132 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2133 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2134 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2135 }
2136 } else {
2137 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2138 }
2139 } else {
2140 return Task::ready(Ok(Default::default()));
2141 }
2142 }
2143
2144 let remote_buffers = self.remote_id().zip(remote_buffers);
2145 let client = self.client.clone();
2146
2147 cx.spawn(|this, mut cx| async move {
2148 let mut project_transaction = ProjectTransaction::default();
2149
2150 if let Some((project_id, remote_buffers)) = remote_buffers {
2151 let response = client
2152 .request(proto::FormatBuffers {
2153 project_id,
2154 buffer_ids: remote_buffers
2155 .iter()
2156 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2157 .collect(),
2158 })
2159 .await?
2160 .transaction
2161 .ok_or_else(|| anyhow!("missing transaction"))?;
2162 project_transaction = this
2163 .update(&mut cx, |this, cx| {
2164 this.deserialize_project_transaction(response, push_to_history, cx)
2165 })
2166 .await?;
2167 }
2168
2169 for (buffer, buffer_abs_path, language_server) in local_buffers {
2170 let text_document = lsp::TextDocumentIdentifier::new(
2171 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2172 );
2173 let capabilities = &language_server.capabilities();
2174 let lsp_edits = if capabilities
2175 .document_formatting_provider
2176 .as_ref()
2177 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2178 {
2179 language_server
2180 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2181 text_document,
2182 options: lsp::FormattingOptions {
2183 tab_size: 4,
2184 insert_spaces: true,
2185 insert_final_newline: Some(true),
2186 ..Default::default()
2187 },
2188 work_done_progress_params: Default::default(),
2189 })
2190 .await?
2191 } else if capabilities
2192 .document_range_formatting_provider
2193 .as_ref()
2194 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2195 {
2196 let buffer_start = lsp::Position::new(0, 0);
2197 let buffer_end =
2198 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2199 language_server
2200 .request::<lsp::request::RangeFormatting>(
2201 lsp::DocumentRangeFormattingParams {
2202 text_document,
2203 range: lsp::Range::new(buffer_start, buffer_end),
2204 options: lsp::FormattingOptions {
2205 tab_size: 4,
2206 insert_spaces: true,
2207 insert_final_newline: Some(true),
2208 ..Default::default()
2209 },
2210 work_done_progress_params: Default::default(),
2211 },
2212 )
2213 .await?
2214 } else {
2215 continue;
2216 };
2217
2218 if let Some(lsp_edits) = lsp_edits {
2219 let edits = this
2220 .update(&mut cx, |this, cx| {
2221 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2222 })
2223 .await?;
2224 buffer.update(&mut cx, |buffer, cx| {
2225 buffer.finalize_last_transaction();
2226 buffer.start_transaction();
2227 for (range, text) in edits {
2228 buffer.edit([range], text, cx);
2229 }
2230 if buffer.end_transaction(cx).is_some() {
2231 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2232 if !push_to_history {
2233 buffer.forget_transaction(transaction.id);
2234 }
2235 project_transaction.0.insert(cx.handle(), transaction);
2236 }
2237 });
2238 }
2239 }
2240
2241 Ok(project_transaction)
2242 })
2243 }
2244
2245 pub fn definition<T: ToPointUtf16>(
2246 &self,
2247 buffer: &ModelHandle<Buffer>,
2248 position: T,
2249 cx: &mut ModelContext<Self>,
2250 ) -> Task<Result<Vec<Location>>> {
2251 let position = position.to_point_utf16(buffer.read(cx));
2252 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2253 }
2254
2255 pub fn references<T: ToPointUtf16>(
2256 &self,
2257 buffer: &ModelHandle<Buffer>,
2258 position: T,
2259 cx: &mut ModelContext<Self>,
2260 ) -> Task<Result<Vec<Location>>> {
2261 let position = position.to_point_utf16(buffer.read(cx));
2262 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2263 }
2264
2265 pub fn document_highlights<T: ToPointUtf16>(
2266 &self,
2267 buffer: &ModelHandle<Buffer>,
2268 position: T,
2269 cx: &mut ModelContext<Self>,
2270 ) -> Task<Result<Vec<DocumentHighlight>>> {
2271 let position = position.to_point_utf16(buffer.read(cx));
2272
2273 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2274 }
2275
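// Run a workspace symbol query against every running language server, de-duplicated by
// server instance, and map the results back onto project worktrees. Remote projects
// forward the query to the host instead.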
2276 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2277 if self.is_local() {
2278 let mut language_servers = HashMap::default();
2279 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2280 if let Some(worktree) = self
2281 .worktree_for_id(*worktree_id, cx)
2282 .and_then(|worktree| worktree.read(cx).as_local())
2283 {
2284 language_servers
2285 .entry(Arc::as_ptr(language_server))
2286 .or_insert((
2287 lsp_adapter.clone(),
2288 language_server.clone(),
2289 *worktree_id,
2290 worktree.abs_path().clone(),
2291 ));
2292 }
2293 }
2294
2295 let mut requests = Vec::new();
2296 for (_, language_server, _, _) in language_servers.values() {
2297 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2298 lsp::WorkspaceSymbolParams {
2299 query: query.to_string(),
2300 ..Default::default()
2301 },
2302 ));
2303 }
2304
2305 cx.spawn_weak(|this, cx| async move {
2306 let responses = futures::future::try_join_all(requests).await?;
2307
2308 let mut symbols = Vec::new();
2309 if let Some(this) = this.upgrade(&cx) {
2310 this.read_with(&cx, |this, cx| {
2311 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2312 language_servers.into_values().zip(responses)
2313 {
2314 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2315 |lsp_symbol| {
2316 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2317 let mut worktree_id = source_worktree_id;
2318 let path;
2319 if let Some((worktree, rel_path)) =
2320 this.find_local_worktree(&abs_path, cx)
2321 {
2322 worktree_id = worktree.read(cx).id();
2323 path = rel_path;
2324 } else {
2325 path = relativize_path(&worktree_abs_path, &abs_path);
2326 }
2327
2328 let label = this
2329 .languages
2330 .select_language(&path)
2331 .and_then(|language| {
2332 language
2333 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2334 })
2335 .unwrap_or_else(|| {
2336 CodeLabel::plain(lsp_symbol.name.clone(), None)
2337 });
2338 let signature = this.symbol_signature(worktree_id, &path);
2339
2340 Some(Symbol {
2341 source_worktree_id,
2342 worktree_id,
2343 language_server_name: adapter.name(),
2344 name: lsp_symbol.name,
2345 kind: lsp_symbol.kind,
2346 label,
2347 path,
2348 range: range_from_lsp(lsp_symbol.location.range),
2349 signature,
2350 })
2351 },
2352 ));
2353 }
2354 })
2355 }
2356
2357 Ok(symbols)
2358 })
2359 } else if let Some(project_id) = self.remote_id() {
2360 let request = self.client.request(proto::GetProjectSymbols {
2361 project_id,
2362 query: query.to_string(),
2363 });
2364 cx.spawn_weak(|this, cx| async move {
2365 let response = request.await?;
2366 let mut symbols = Vec::new();
2367 if let Some(this) = this.upgrade(&cx) {
2368 this.read_with(&cx, |this, _| {
2369 symbols.extend(
2370 response
2371 .symbols
2372 .into_iter()
2373 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2374 );
2375 })
2376 }
2377 Ok(symbols)
2378 })
2379 } else {
2380 Task::ready(Ok(Default::default()))
2381 }
2382 }
2383
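// Open the buffer containing a symbol returned by `symbols`, either through the
// originating language server (locally) or by asking the host for the buffer.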
2384 pub fn open_buffer_for_symbol(
2385 &mut self,
2386 symbol: &Symbol,
2387 cx: &mut ModelContext<Self>,
2388 ) -> Task<Result<ModelHandle<Buffer>>> {
2389 if self.is_local() {
2390 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2391 symbol.source_worktree_id,
2392 symbol.language_server_name.clone(),
2393 )) {
2394 server.clone()
2395 } else {
2396 return Task::ready(Err(anyhow!(
2397 "language server for worktree and language not found"
2398 )));
2399 };
2400
2401 let worktree_abs_path = if let Some(worktree_abs_path) = self
2402 .worktree_for_id(symbol.worktree_id, cx)
2403 .and_then(|worktree| worktree.read(cx).as_local())
2404 .map(|local_worktree| local_worktree.abs_path())
2405 {
2406 worktree_abs_path
2407 } else {
2408 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2409 };
2410 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2411 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2412 uri
2413 } else {
2414 return Task::ready(Err(anyhow!("invalid symbol path")));
2415 };
2416
2417 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2418 } else if let Some(project_id) = self.remote_id() {
2419 let request = self.client.request(proto::OpenBufferForSymbol {
2420 project_id,
2421 symbol: Some(serialize_symbol(symbol)),
2422 });
2423 cx.spawn(|this, mut cx| async move {
2424 let response = request.await?;
2425 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2426 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2427 .await
2428 })
2429 } else {
2430 Task::ready(Err(anyhow!("project does not have a remote id")))
2431 }
2432 }
2433
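// Request completions at a position. Locally this issues a textDocument/completion
// request and converts each item into an anchored `Completion`; remotely the host
// computes completions and the response is deserialized after the buffer catches up
// to the host's version.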
2434 pub fn completions<T: ToPointUtf16>(
2435 &self,
2436 source_buffer_handle: &ModelHandle<Buffer>,
2437 position: T,
2438 cx: &mut ModelContext<Self>,
2439 ) -> Task<Result<Vec<Completion>>> {
2440 let source_buffer_handle = source_buffer_handle.clone();
2441 let source_buffer = source_buffer_handle.read(cx);
2442 let buffer_id = source_buffer.remote_id();
2443 let language = source_buffer.language().cloned();
2444 let worktree;
2445 let buffer_abs_path;
2446 if let Some(file) = File::from_dyn(source_buffer.file()) {
2447 worktree = file.worktree.clone();
2448 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2449 } else {
2450 return Task::ready(Ok(Default::default()));
2451 };
2452
2453 let position = position.to_point_utf16(source_buffer);
2454 let anchor = source_buffer.anchor_after(position);
2455
2456 if worktree.read(cx).as_local().is_some() {
2457 let buffer_abs_path = buffer_abs_path.unwrap();
2458 let (_, lang_server) =
2459 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2460 server.clone()
2461 } else {
2462 return Task::ready(Ok(Default::default()));
2463 };
2464
2465 cx.spawn(|_, cx| async move {
2466 let completions = lang_server
2467 .request::<lsp::request::Completion>(lsp::CompletionParams {
2468 text_document_position: lsp::TextDocumentPositionParams::new(
2469 lsp::TextDocumentIdentifier::new(
2470 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2471 ),
2472 point_to_lsp(position),
2473 ),
2474 context: Default::default(),
2475 work_done_progress_params: Default::default(),
2476 partial_result_params: Default::default(),
2477 })
2478 .await
2479 .context("lsp completion request failed")?;
2480
2481 let completions = if let Some(completions) = completions {
2482 match completions {
2483 lsp::CompletionResponse::Array(completions) => completions,
2484 lsp::CompletionResponse::List(list) => list.items,
2485 }
2486 } else {
2487 Default::default()
2488 };
2489
2490 source_buffer_handle.read_with(&cx, |this, _| {
2491 Ok(completions
2492 .into_iter()
2493 .filter_map(|lsp_completion| {
2494 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2495 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2496 (range_from_lsp(edit.range), edit.new_text.clone())
2497 }
2498 None => {
2499 let clipped_position =
2500 this.clip_point_utf16(position, Bias::Left);
2501 if position != clipped_position {
2502 log::info!("completion out of expected range");
2503 return None;
2504 }
2505 (
2506 this.common_prefix_at(
2507 clipped_position,
2508 &lsp_completion.label,
2509 ),
2510 lsp_completion.label.clone(),
2511 )
2512 }
2513 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2514 log::info!("unsupported insert/replace completion");
2515 return None;
2516 }
2517 };
2518
2519 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2520 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2521 if clipped_start == old_range.start && clipped_end == old_range.end {
2522 Some(Completion {
2523 old_range: this.anchor_before(old_range.start)
2524 ..this.anchor_after(old_range.end),
2525 new_text,
2526 label: language
2527 .as_ref()
2528 .and_then(|l| l.label_for_completion(&lsp_completion))
2529 .unwrap_or_else(|| {
2530 CodeLabel::plain(
2531 lsp_completion.label.clone(),
2532 lsp_completion.filter_text.as_deref(),
2533 )
2534 }),
2535 lsp_completion,
2536 })
2537 } else {
2538 log::info!("completion out of expected range");
2539 None
2540 }
2541 })
2542 .collect())
2543 })
2544 })
2545 } else if let Some(project_id) = self.remote_id() {
2546 let rpc = self.client.clone();
2547 let message = proto::GetCompletions {
2548 project_id,
2549 buffer_id,
2550 position: Some(language::proto::serialize_anchor(&anchor)),
2551 version: serialize_version(&source_buffer.version()),
2552 };
2553 cx.spawn_weak(|_, mut cx| async move {
2554 let response = rpc.request(message).await?;
2555
2556 source_buffer_handle
2557 .update(&mut cx, |buffer, _| {
2558 buffer.wait_for_version(deserialize_version(response.version))
2559 })
2560 .await;
2561
2562 response
2563 .completions
2564 .into_iter()
2565 .map(|completion| {
2566 language::proto::deserialize_completion(completion, language.as_ref())
2567 })
2568 .collect()
2569 })
2570 } else {
2571 Task::ready(Ok(Default::default()))
2572 }
2573 }
2574
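// Resolve a completion item and apply any `additionalTextEdits` it carries, returning
// the resulting transaction so callers can group it with the primary completion edit.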
2575 pub fn apply_additional_edits_for_completion(
2576 &self,
2577 buffer_handle: ModelHandle<Buffer>,
2578 completion: Completion,
2579 push_to_history: bool,
2580 cx: &mut ModelContext<Self>,
2581 ) -> Task<Result<Option<Transaction>>> {
2582 let buffer = buffer_handle.read(cx);
2583 let buffer_id = buffer.remote_id();
2584
2585 if self.is_local() {
2586 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2587 {
2588 server.clone()
2589 } else {
2590 return Task::ready(Ok(Default::default()));
2591 };
2592
2593 cx.spawn(|this, mut cx| async move {
2594 let resolved_completion = lang_server
2595 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2596 .await?;
2597 if let Some(edits) = resolved_completion.additional_text_edits {
2598 let edits = this
2599 .update(&mut cx, |this, cx| {
2600 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2601 })
2602 .await?;
2603 buffer_handle.update(&mut cx, |buffer, cx| {
2604 buffer.finalize_last_transaction();
2605 buffer.start_transaction();
2606 for (range, text) in edits {
2607 buffer.edit([range], text, cx);
2608 }
2609 let transaction = if buffer.end_transaction(cx).is_some() {
2610 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2611 if !push_to_history {
2612 buffer.forget_transaction(transaction.id);
2613 }
2614 Some(transaction)
2615 } else {
2616 None
2617 };
2618 Ok(transaction)
2619 })
2620 } else {
2621 Ok(None)
2622 }
2623 })
2624 } else if let Some(project_id) = self.remote_id() {
2625 let client = self.client.clone();
2626 cx.spawn(|_, mut cx| async move {
2627 let response = client
2628 .request(proto::ApplyCompletionAdditionalEdits {
2629 project_id,
2630 buffer_id,
2631 completion: Some(language::proto::serialize_completion(&completion)),
2632 })
2633 .await?;
2634
2635 if let Some(transaction) = response.transaction {
2636 let transaction = language::proto::deserialize_transaction(transaction)?;
2637 buffer_handle
2638 .update(&mut cx, |buffer, _| {
2639 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2640 })
2641 .await;
2642 if push_to_history {
2643 buffer_handle.update(&mut cx, |buffer, _| {
2644 buffer.push_transaction(transaction.clone(), Instant::now());
2645 });
2646 }
2647 Ok(Some(transaction))
2648 } else {
2649 Ok(None)
2650 }
2651 })
2652 } else {
2653 Task::ready(Err(anyhow!("project does not have a remote id")))
2654 }
2655 }
2656
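// Fetch code actions for a range, passing along the diagnostics that overlap it so the
// server can offer quick fixes. Only servers that advertise a code action provider are
// queried.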
2657 pub fn code_actions<T: Clone + ToOffset>(
2658 &self,
2659 buffer_handle: &ModelHandle<Buffer>,
2660 range: Range<T>,
2661 cx: &mut ModelContext<Self>,
2662 ) -> Task<Result<Vec<CodeAction>>> {
2663 let buffer_handle = buffer_handle.clone();
2664 let buffer = buffer_handle.read(cx);
2665 let snapshot = buffer.snapshot();
2666 let relevant_diagnostics = snapshot
2667 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2668 .map(|entry| entry.to_lsp_diagnostic_stub())
2669 .collect();
2670 let buffer_id = buffer.remote_id();
2671 let worktree;
2672 let buffer_abs_path;
2673 if let Some(file) = File::from_dyn(buffer.file()) {
2674 worktree = file.worktree.clone();
2675 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2676 } else {
2677 return Task::ready(Ok(Default::default()));
2678 };
2679 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2680
2681 if worktree.read(cx).as_local().is_some() {
2682 let buffer_abs_path = buffer_abs_path.unwrap();
2683 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2684 {
2685 server.clone()
2686 } else {
2687 return Task::ready(Ok(Default::default()));
2688 };
2689
2690 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2691 cx.foreground().spawn(async move {
2692 if lang_server.capabilities().code_action_provider.is_none() {
2693 return Ok(Default::default());
2694 }
2695
2696 Ok(lang_server
2697 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2698 text_document: lsp::TextDocumentIdentifier::new(
2699 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2700 ),
2701 range: lsp_range,
2702 work_done_progress_params: Default::default(),
2703 partial_result_params: Default::default(),
2704 context: lsp::CodeActionContext {
2705 diagnostics: relevant_diagnostics,
2706 only: Some(vec![
2707 lsp::CodeActionKind::QUICKFIX,
2708 lsp::CodeActionKind::REFACTOR,
2709 lsp::CodeActionKind::REFACTOR_EXTRACT,
2710 lsp::CodeActionKind::SOURCE,
2711 ]),
2712 },
2713 })
2714 .await?
2715 .unwrap_or_default()
2716 .into_iter()
2717 .filter_map(|entry| {
2718 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2719 Some(CodeAction {
2720 range: range.clone(),
2721 lsp_action,
2722 })
2723 } else {
2724 None
2725 }
2726 })
2727 .collect())
2728 })
2729 } else if let Some(project_id) = self.remote_id() {
2730 let rpc = self.client.clone();
2731 let version = buffer.version();
2732 cx.spawn_weak(|_, mut cx| async move {
2733 let response = rpc
2734 .request(proto::GetCodeActions {
2735 project_id,
2736 buffer_id,
2737 start: Some(language::proto::serialize_anchor(&range.start)),
2738 end: Some(language::proto::serialize_anchor(&range.end)),
2739 version: serialize_version(&version),
2740 })
2741 .await?;
2742
2743 buffer_handle
2744 .update(&mut cx, |buffer, _| {
2745 buffer.wait_for_version(deserialize_version(response.version))
2746 })
2747 .await;
2748
2749 response
2750 .actions
2751 .into_iter()
2752 .map(language::proto::deserialize_code_action)
2753 .collect()
2754 })
2755 } else {
2756 Task::ready(Ok(Default::default()))
2757 }
2758 }
2759
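// Apply a code action: resolve it (or re-request it when it carries no resolve data),
// then either apply its workspace edit or execute its command, collecting whatever
// buffer changes result into a ProjectTransaction.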
2760 pub fn apply_code_action(
2761 &self,
2762 buffer_handle: ModelHandle<Buffer>,
2763 mut action: CodeAction,
2764 push_to_history: bool,
2765 cx: &mut ModelContext<Self>,
2766 ) -> Task<Result<ProjectTransaction>> {
2767 if self.is_local() {
2768 let buffer = buffer_handle.read(cx);
2769 let (lsp_adapter, lang_server) =
2770 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2771 server.clone()
2772 } else {
2773 return Task::ready(Ok(Default::default()));
2774 };
2775 let range = action.range.to_point_utf16(buffer);
2776
2777 cx.spawn(|this, mut cx| async move {
2778 if let Some(lsp_range) = action
2779 .lsp_action
2780 .data
2781 .as_mut()
2782 .and_then(|d| d.get_mut("codeActionParams"))
2783 .and_then(|d| d.get_mut("range"))
2784 {
2785 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2786 action.lsp_action = lang_server
2787 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2788 .await?;
2789 } else {
2790 let actions = this
2791 .update(&mut cx, |this, cx| {
2792 this.code_actions(&buffer_handle, action.range, cx)
2793 })
2794 .await?;
2795 action.lsp_action = actions
2796 .into_iter()
2797 .find(|a| a.lsp_action.title == action.lsp_action.title)
2798 .ok_or_else(|| anyhow!("code action is outdated"))?
2799 .lsp_action;
2800 }
2801
2802 if let Some(edit) = action.lsp_action.edit {
2803 Self::deserialize_workspace_edit(
2804 this,
2805 edit,
2806 push_to_history,
2807 lsp_adapter,
2808 lang_server,
2809 &mut cx,
2810 )
2811 .await
2812 } else if let Some(command) = action.lsp_action.command {
2813 this.update(&mut cx, |this, _| {
2814 this.last_workspace_edits_by_language_server
2815 .remove(&lang_server.server_id());
2816 });
2817 lang_server
2818 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2819 command: command.command,
2820 arguments: command.arguments.unwrap_or_default(),
2821 ..Default::default()
2822 })
2823 .await?;
2824 Ok(this.update(&mut cx, |this, _| {
2825 this.last_workspace_edits_by_language_server
2826 .remove(&lang_server.server_id())
2827 .unwrap_or_default()
2828 }))
2829 } else {
2830 Ok(ProjectTransaction::default())
2831 }
2832 })
2833 } else if let Some(project_id) = self.remote_id() {
2834 let client = self.client.clone();
2835 let request = proto::ApplyCodeAction {
2836 project_id,
2837 buffer_id: buffer_handle.read(cx).remote_id(),
2838 action: Some(language::proto::serialize_code_action(&action)),
2839 };
2840 cx.spawn(|this, mut cx| async move {
2841 let response = client
2842 .request(request)
2843 .await?
2844 .transaction
2845 .ok_or_else(|| anyhow!("missing transaction"))?;
2846 this.update(&mut cx, |this, cx| {
2847 this.deserialize_project_transaction(response, push_to_history, cx)
2848 })
2849 .await
2850 })
2851 } else {
2852 Task::ready(Err(anyhow!("project does not have a remote id")))
2853 }
2854 }
2855
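// Apply an LSP workspace edit on the host: resource operations (create, rename, delete)
// go straight to the filesystem, while text edits are applied to buffers opened via the
// language server and recorded in the returned ProjectTransaction.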
2856 async fn deserialize_workspace_edit(
2857 this: ModelHandle<Self>,
2858 edit: lsp::WorkspaceEdit,
2859 push_to_history: bool,
2860 lsp_adapter: Arc<dyn LspAdapter>,
2861 language_server: Arc<LanguageServer>,
2862 cx: &mut AsyncAppContext,
2863 ) -> Result<ProjectTransaction> {
2864 let fs = this.read_with(cx, |this, _| this.fs.clone());
2865 let mut operations = Vec::new();
2866 if let Some(document_changes) = edit.document_changes {
2867 match document_changes {
2868 lsp::DocumentChanges::Edits(edits) => {
2869 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2870 }
2871 lsp::DocumentChanges::Operations(ops) => operations = ops,
2872 }
2873 } else if let Some(changes) = edit.changes {
2874 operations.extend(changes.into_iter().map(|(uri, edits)| {
2875 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2876 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2877 uri,
2878 version: None,
2879 },
2880 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2881 })
2882 }));
2883 }
2884
2885 let mut project_transaction = ProjectTransaction::default();
2886 for operation in operations {
2887 match operation {
2888 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2889 let abs_path = op
2890 .uri
2891 .to_file_path()
2892 .map_err(|_| anyhow!("can't convert URI to path"))?;
2893
2894 if let Some(parent_path) = abs_path.parent() {
2895 fs.create_dir(parent_path).await?;
2896 }
// A trailing slash on the URI indicates a directory rather than a file.
2897 if op.uri.path().ends_with('/') {
2898 fs.create_dir(&abs_path).await?;
2899 } else {
2900 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2901 .await?;
2902 }
2903 }
2904 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2905 let source_abs_path = op
2906 .old_uri
2907 .to_file_path()
2908 .map_err(|_| anyhow!("can't convert URI to path"))?;
2909 let target_abs_path = op
2910 .new_uri
2911 .to_file_path()
2912 .map_err(|_| anyhow!("can't convert URI to path"))?;
2913 fs.rename(
2914 &source_abs_path,
2915 &target_abs_path,
2916 op.options.map(Into::into).unwrap_or_default(),
2917 )
2918 .await?;
2919 }
2920 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2921 let abs_path = op
2922 .uri
2923 .to_file_path()
2924 .map_err(|_| anyhow!("can't convert URI to path"))?;
2925 let options = op.options.map(Into::into).unwrap_or_default();
// As above, a trailing slash on the URI indicates a directory.
2926 if op.uri.path().ends_with('/') {
2927 fs.remove_dir(&abs_path, options).await?;
2928 } else {
2929 fs.remove_file(&abs_path, options).await?;
2930 }
2931 }
2932 lsp::DocumentChangeOperation::Edit(op) => {
2933 let buffer_to_edit = this
2934 .update(cx, |this, cx| {
2935 this.open_local_buffer_via_lsp(
2936 op.text_document.uri,
2937 lsp_adapter.clone(),
2938 language_server.clone(),
2939 cx,
2940 )
2941 })
2942 .await?;
2943
2944 let edits = this
2945 .update(cx, |this, cx| {
2946 let edits = op.edits.into_iter().map(|edit| match edit {
2947 lsp::OneOf::Left(edit) => edit,
2948 lsp::OneOf::Right(edit) => edit.text_edit,
2949 });
2950 this.edits_from_lsp(
2951 &buffer_to_edit,
2952 edits,
2953 op.text_document.version,
2954 cx,
2955 )
2956 })
2957 .await?;
2958
2959 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2960 buffer.finalize_last_transaction();
2961 buffer.start_transaction();
2962 for (range, text) in edits {
2963 buffer.edit([range], text, cx);
2964 }
2965 let transaction = if buffer.end_transaction(cx).is_some() {
2966 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2967 if !push_to_history {
2968 buffer.forget_transaction(transaction.id);
2969 }
2970 Some(transaction)
2971 } else {
2972 None
2973 };
2974
2975 transaction
2976 });
2977 if let Some(transaction) = transaction {
2978 project_transaction.0.insert(buffer_to_edit, transaction);
2979 }
2980 }
2981 }
2982 }
2983
2984 Ok(project_transaction)
2985 }
2986
2987 pub fn prepare_rename<T: ToPointUtf16>(
2988 &self,
2989 buffer: ModelHandle<Buffer>,
2990 position: T,
2991 cx: &mut ModelContext<Self>,
2992 ) -> Task<Result<Option<Range<Anchor>>>> {
2993 let position = position.to_point_utf16(buffer.read(cx));
2994 self.request_lsp(buffer, PrepareRename { position }, cx)
2995 }
2996
2997 pub fn perform_rename<T: ToPointUtf16>(
2998 &self,
2999 buffer: ModelHandle<Buffer>,
3000 position: T,
3001 new_name: String,
3002 push_to_history: bool,
3003 cx: &mut ModelContext<Self>,
3004 ) -> Task<Result<ProjectTransaction>> {
3005 let position = position.to_point_utf16(buffer.read(cx));
3006 self.request_lsp(
3007 buffer,
3008 PerformRename {
3009 position,
3010 new_name,
3011 push_to_history,
3012 },
3013 cx,
3014 )
3015 }
3016
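// Project-wide search. Locally, worker tasks scan each visible worktree's files to find
// candidate paths, matching buffers are opened in the background, and their contents are
// searched on the thread pool; remote projects send the query to the host.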
3017 pub fn search(
3018 &self,
3019 query: SearchQuery,
3020 cx: &mut ModelContext<Self>,
3021 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3022 if self.is_local() {
3023 let snapshots = self
3024 .visible_worktrees(cx)
3025 .filter_map(|tree| {
3026 let tree = tree.read(cx).as_local()?;
3027 Some(tree.snapshot())
3028 })
3029 .collect::<Vec<_>>();
3030
3031 let background = cx.background().clone();
3032 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3033 if path_count == 0 {
3034 return Task::ready(Ok(Default::default()));
3035 }
3036 let workers = background.num_cpus().min(path_count);
3037 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3038 cx.background()
3039 .spawn({
3040 let fs = self.fs.clone();
3041 let background = cx.background().clone();
3042 let query = query.clone();
3043 async move {
3044 let fs = &fs;
3045 let query = &query;
3046 let matching_paths_tx = &matching_paths_tx;
3047 let paths_per_worker = (path_count + workers - 1) / workers;
3048 let snapshots = &snapshots;
3049 background
3050 .scoped(|scope| {
3051 for worker_ix in 0..workers {
3052 let worker_start_ix = worker_ix * paths_per_worker;
3053 let worker_end_ix = worker_start_ix + paths_per_worker;
3054 scope.spawn(async move {
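// Each worker walks the concatenated file lists of every snapshot and only
// examines the indices in its own [worker_start_ix, worker_end_ix) slice.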
3055 let mut snapshot_start_ix = 0;
3056 let mut abs_path = PathBuf::new();
3057 for snapshot in snapshots {
3058 let snapshot_end_ix =
3059 snapshot_start_ix + snapshot.visible_file_count();
3060 if worker_end_ix <= snapshot_start_ix {
3061 break;
3062 } else if worker_start_ix > snapshot_end_ix {
3063 snapshot_start_ix = snapshot_end_ix;
3064 continue;
3065 } else {
3066 let start_in_snapshot = worker_start_ix
3067 .saturating_sub(snapshot_start_ix);
3068 let end_in_snapshot =
3069 cmp::min(worker_end_ix, snapshot_end_ix)
3070 - snapshot_start_ix;
3071
3072 for entry in snapshot
3073 .files(false, start_in_snapshot)
3074 .take(end_in_snapshot - start_in_snapshot)
3075 {
3076 if matching_paths_tx.is_closed() {
3077 break;
3078 }
3079
3080 abs_path.clear();
3081 abs_path.push(&snapshot.abs_path());
3082 abs_path.push(&entry.path);
3083 let matches = if let Some(file) =
3084 fs.open_sync(&abs_path).await.log_err()
3085 {
3086 query.detect(file).unwrap_or(false)
3087 } else {
3088 false
3089 };
3090
3091 if matches {
3092 let project_path =
3093 (snapshot.id(), entry.path.clone());
3094 if matching_paths_tx
3095 .send(project_path)
3096 .await
3097 .is_err()
3098 {
3099 break;
3100 }
3101 }
3102 }
3103
3104 snapshot_start_ix = snapshot_end_ix;
3105 }
3106 }
3107 });
3108 }
3109 })
3110 .await;
3111 }
3112 })
3113 .detach();
3114
3115 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3116 let open_buffers = self
3117 .opened_buffers
3118 .values()
3119 .filter_map(|b| b.upgrade(cx))
3120 .collect::<HashSet<_>>();
3121 cx.spawn(|this, cx| async move {
3122 for buffer in &open_buffers {
3123 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3124 buffers_tx.send((buffer.clone(), snapshot)).await?;
3125 }
3126
3127 let open_buffers = Rc::new(RefCell::new(open_buffers));
3128 while let Some(project_path) = matching_paths_rx.next().await {
3129 if buffers_tx.is_closed() {
3130 break;
3131 }
3132
3133 let this = this.clone();
3134 let open_buffers = open_buffers.clone();
3135 let buffers_tx = buffers_tx.clone();
3136 cx.spawn(|mut cx| async move {
3137 if let Some(buffer) = this
3138 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3139 .await
3140 .log_err()
3141 {
3142 if open_buffers.borrow_mut().insert(buffer.clone()) {
3143 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3144 buffers_tx.send((buffer, snapshot)).await?;
3145 }
3146 }
3147
3148 Ok::<_, anyhow::Error>(())
3149 })
3150 .detach();
3151 }
3152
3153 Ok::<_, anyhow::Error>(())
3154 })
3155 .detach_and_log_err(cx);
3156
3157 let background = cx.background().clone();
3158 cx.background().spawn(async move {
3159 let query = &query;
3160 let mut matched_buffers = Vec::new();
3161 for _ in 0..workers {
3162 matched_buffers.push(HashMap::default());
3163 }
3164 background
3165 .scoped(|scope| {
3166 for worker_matched_buffers in matched_buffers.iter_mut() {
3167 let mut buffers_rx = buffers_rx.clone();
3168 scope.spawn(async move {
3169 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3170 let buffer_matches = query
3171 .search(snapshot.as_rope())
3172 .await
3173 .iter()
3174 .map(|range| {
3175 snapshot.anchor_before(range.start)
3176 ..snapshot.anchor_after(range.end)
3177 })
3178 .collect::<Vec<_>>();
3179 if !buffer_matches.is_empty() {
3180 worker_matched_buffers
3181 .insert(buffer.clone(), buffer_matches);
3182 }
3183 }
3184 });
3185 }
3186 })
3187 .await;
3188 Ok(matched_buffers.into_iter().flatten().collect())
3189 })
3190 } else if let Some(project_id) = self.remote_id() {
3191 let request = self.client.request(query.to_proto(project_id));
3192 cx.spawn(|this, mut cx| async move {
3193 let response = request.await?;
3194 let mut result = HashMap::default();
3195 for location in response.locations {
3196 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3197 let target_buffer = this
3198 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3199 .await?;
3200 let start = location
3201 .start
3202 .and_then(deserialize_anchor)
3203 .ok_or_else(|| anyhow!("missing target start"))?;
3204 let end = location
3205 .end
3206 .and_then(deserialize_anchor)
3207 .ok_or_else(|| anyhow!("missing target end"))?;
3208 result
3209 .entry(target_buffer)
3210 .or_insert_with(Vec::new)
3211 .push(start..end);
3212 }
3213 Ok(result)
3214 })
3215 } else {
3216 Task::ready(Ok(Default::default()))
3217 }
3218 }
3219
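// Dispatch an `LspCommand` either directly to the buffer's language server (for local
// buffers) or over RPC to the host, converting the response with the command's own
// deserialization logic.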
3220 fn request_lsp<R: LspCommand>(
3221 &self,
3222 buffer_handle: ModelHandle<Buffer>,
3223 request: R,
3224 cx: &mut ModelContext<Self>,
3225 ) -> Task<Result<R::Response>>
3226 where
3227 <R::LspRequest as lsp::request::Request>::Result: Send,
3228 {
3229 let buffer = buffer_handle.read(cx);
3230 if self.is_local() {
3231 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3232 if let Some((file, (_, language_server))) =
3233 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3234 {
3235 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3236 return cx.spawn(|this, cx| async move {
3237 if !request.check_capabilities(&language_server.capabilities()) {
3238 return Ok(Default::default());
3239 }
3240
3241 let response = language_server
3242 .request::<R::LspRequest>(lsp_params)
3243 .await
3244 .context("lsp request failed")?;
3245 request
3246 .response_from_lsp(response, this, buffer_handle, cx)
3247 .await
3248 });
3249 }
3250 } else if let Some(project_id) = self.remote_id() {
3251 let rpc = self.client.clone();
3252 let message = request.to_proto(project_id, buffer);
3253 return cx.spawn(|this, cx| async move {
3254 let response = rpc.request(message).await?;
3255 request
3256 .response_from_proto(response, this, buffer_handle, cx)
3257 .await
3258 });
3259 }
3260 Task::ready(Ok(Default::default()))
3261 }
3262
3263 pub fn find_or_create_local_worktree(
3264 &mut self,
3265 abs_path: impl AsRef<Path>,
3266 visible: bool,
3267 cx: &mut ModelContext<Self>,
3268 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3269 let abs_path = abs_path.as_ref();
3270 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3271 Task::ready(Ok((tree.clone(), relative_path.into())))
3272 } else {
3273 let worktree = self.create_local_worktree(abs_path, visible, cx);
3274 cx.foreground()
3275 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3276 }
3277 }
3278
3279 pub fn find_local_worktree(
3280 &self,
3281 abs_path: &Path,
3282 cx: &AppContext,
3283 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3284 for tree in self.worktrees(cx) {
3285 if let Some(relative_path) = tree
3286 .read(cx)
3287 .as_local()
3288 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3289 {
3290 return Some((tree.clone(), relative_path.into()));
3291 }
3292 }
3293 None
3294 }
3295
3296 pub fn is_shared(&self) -> bool {
3297 match &self.client_state {
3298 ProjectClientState::Local { is_shared, .. } => *is_shared,
3299 ProjectClientState::Remote { .. } => false,
3300 }
3301 }
3302
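// Create (or reuse an in-flight load of) a local worktree for the given path, then
// register or share it with the server when the project has a remote id.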
3303 fn create_local_worktree(
3304 &mut self,
3305 abs_path: impl AsRef<Path>,
3306 visible: bool,
3307 cx: &mut ModelContext<Self>,
3308 ) -> Task<Result<ModelHandle<Worktree>>> {
3309 let fs = self.fs.clone();
3310 let client = self.client.clone();
3311 let next_entry_id = self.next_entry_id.clone();
3312 let path: Arc<Path> = abs_path.as_ref().into();
3313 let task = self
3314 .loading_local_worktrees
3315 .entry(path.clone())
3316 .or_insert_with(|| {
3317 cx.spawn(|project, mut cx| {
3318 async move {
3319 let worktree = Worktree::local(
3320 client.clone(),
3321 path.clone(),
3322 visible,
3323 fs,
3324 next_entry_id,
3325 &mut cx,
3326 )
3327 .await;
3328 project.update(&mut cx, |project, _| {
3329 project.loading_local_worktrees.remove(&path);
3330 });
3331 let worktree = worktree?;
3332
3333 let (remote_project_id, is_shared) =
3334 project.update(&mut cx, |project, cx| {
3335 project.add_worktree(&worktree, cx);
3336 (project.remote_id(), project.is_shared())
3337 });
3338
3339 if let Some(project_id) = remote_project_id {
3340 if is_shared {
3341 worktree
3342 .update(&mut cx, |worktree, cx| {
3343 worktree.as_local_mut().unwrap().share(project_id, cx)
3344 })
3345 .await?;
3346 } else {
3347 worktree
3348 .update(&mut cx, |worktree, cx| {
3349 worktree.as_local_mut().unwrap().register(project_id, cx)
3350 })
3351 .await?;
3352 }
3353 }
3354
3355 Ok(worktree)
3356 }
3357 .map_err(Arc::new)
3358 })
3359 .shared()
3360 })
3361 .clone();
3362 cx.foreground().spawn(async move {
3363 match task.await {
3364 Ok(worktree) => Ok(worktree),
3365 Err(err) => Err(anyhow!("{}", err)),
3366 }
3367 })
3368 }
3369
3370 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3371 self.worktrees.retain(|worktree| {
3372 worktree
3373 .upgrade(cx)
3374 .map_or(false, |w| w.read(cx).id() != id)
3375 });
3376 cx.notify();
3377 }
3378
3379 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3380 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3381 if worktree.read(cx).is_local() {
3382 cx.subscribe(&worktree, |this, worktree, _, cx| {
3383 this.update_local_worktree_buffers(worktree, cx);
3384 })
3385 .detach();
3386 }
3387
3388 let push_strong_handle = {
3389 let worktree = worktree.read(cx);
3390 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3391 };
3392 if push_strong_handle {
3393 self.worktrees
3394 .push(WorktreeHandle::Strong(worktree.clone()));
3395 } else {
3396 cx.observe_release(&worktree, |this, _, cx| {
3397 this.worktrees
3398 .retain(|worktree| worktree.upgrade(cx).is_some());
3399 cx.notify();
3400 })
3401 .detach();
3402 self.worktrees
3403 .push(WorktreeHandle::Weak(worktree.downgrade()));
3404 }
3405 cx.notify();
3406 }
3407
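// Reconcile open buffers with a changed worktree snapshot: refresh each buffer's file
// metadata, drop entries whose buffers are gone, and re-register renamed buffers with
// their language servers.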
3408 fn update_local_worktree_buffers(
3409 &mut self,
3410 worktree_handle: ModelHandle<Worktree>,
3411 cx: &mut ModelContext<Self>,
3412 ) {
3413 let snapshot = worktree_handle.read(cx).snapshot();
3414 let mut buffers_to_delete = Vec::new();
3415 let mut renamed_buffers = Vec::new();
3416 for (buffer_id, buffer) in &self.opened_buffers {
3417 if let Some(buffer) = buffer.upgrade(cx) {
3418 buffer.update(cx, |buffer, cx| {
3419 if let Some(old_file) = File::from_dyn(buffer.file()) {
3420 if old_file.worktree != worktree_handle {
3421 return;
3422 }
3423
3424 let new_file = if let Some(entry) = old_file
3425 .entry_id
3426 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3427 {
3428 File {
3429 is_local: true,
3430 entry_id: Some(entry.id),
3431 mtime: entry.mtime,
3432 path: entry.path.clone(),
3433 worktree: worktree_handle.clone(),
3434 }
3435 } else if let Some(entry) =
3436 snapshot.entry_for_path(old_file.path().as_ref())
3437 {
3438 File {
3439 is_local: true,
3440 entry_id: Some(entry.id),
3441 mtime: entry.mtime,
3442 path: entry.path.clone(),
3443 worktree: worktree_handle.clone(),
3444 }
3445 } else {
3446 File {
3447 is_local: true,
3448 entry_id: None,
3449 path: old_file.path().clone(),
3450 mtime: old_file.mtime(),
3451 worktree: worktree_handle.clone(),
3452 }
3453 };
3454
3455 let old_path = old_file.abs_path(cx);
3456 if new_file.abs_path(cx) != old_path {
3457 renamed_buffers.push((cx.handle(), old_path));
3458 }
3459
3460 if let Some(project_id) = self.remote_id() {
3461 self.client
3462 .send(proto::UpdateBufferFile {
3463 project_id,
3464 buffer_id: *buffer_id as u64,
3465 file: Some(new_file.to_proto()),
3466 })
3467 .log_err();
3468 }
3469 buffer.file_updated(Box::new(new_file), cx).detach();
3470 }
3471 });
3472 } else {
3473 buffers_to_delete.push(*buffer_id);
3474 }
3475 }
3476
3477 for buffer_id in buffers_to_delete {
3478 self.opened_buffers.remove(&buffer_id);
3479 }
3480
3481 for (buffer, old_path) in renamed_buffers {
3482 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3483 self.assign_language_to_buffer(&buffer, cx);
3484 self.register_buffer_with_language_server(&buffer, cx);
3485 }
3486 }
3487
3488 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3489 let new_active_entry = entry.and_then(|project_path| {
3490 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3491 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3492 Some(entry.id)
3493 });
3494 if new_active_entry != self.active_entry {
3495 self.active_entry = new_active_entry;
3496 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3497 }
3498 }
3499
3500 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3501 self.language_servers_with_diagnostics_running > 0
3502 }
3503
3504 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3505 let mut summary = DiagnosticSummary::default();
3506 for (_, path_summary) in self.diagnostic_summaries(cx) {
3507 summary.error_count += path_summary.error_count;
3508 summary.warning_count += path_summary.warning_count;
3509 summary.info_count += path_summary.info_count;
3510 summary.hint_count += path_summary.hint_count;
3511 }
3512 summary
3513 }
3514
3515 pub fn diagnostic_summaries<'a>(
3516 &'a self,
3517 cx: &'a AppContext,
3518 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3519 self.worktrees(cx).flat_map(move |worktree| {
3520 let worktree = worktree.read(cx);
3521 let worktree_id = worktree.id();
3522 worktree
3523 .diagnostic_summaries()
3524 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3525 })
3526 }
3527
3528 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3529 self.language_servers_with_diagnostics_running += 1;
3530 if self.language_servers_with_diagnostics_running == 1 {
3531 cx.emit(Event::DiskBasedDiagnosticsStarted);
3532 }
3533 }
3534
3535 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3536 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3537 self.language_servers_with_diagnostics_running -= 1;
3538 if self.language_servers_with_diagnostics_running == 0 {
3539 cx.emit(Event::DiskBasedDiagnosticsFinished);
3540 }
3541 }
3542
3543 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3544 self.active_entry
3545 }
3546
3547 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3548 self.worktree_for_id(path.worktree_id, cx)?
3549 .read(cx)
3550 .entry_for_path(&path.path)
3551 .map(|entry| entry.id)
3552 }
3553
3554 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3555 let worktree = self.worktree_for_entry(entry_id, cx)?;
3556 let worktree = worktree.read(cx);
3557 let worktree_id = worktree.id();
3558 let path = worktree.entry_for_id(entry_id)?.path.clone();
3559 Some(ProjectPath { worktree_id, path })
3560 }
3561
3562 // RPC message handlers
3563
3564 async fn handle_unshare_project(
3565 this: ModelHandle<Self>,
3566 _: TypedEnvelope<proto::UnshareProject>,
3567 _: Arc<Client>,
3568 mut cx: AsyncAppContext,
3569 ) -> Result<()> {
3570 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3571 Ok(())
3572 }
3573
3574 async fn handle_add_collaborator(
3575 this: ModelHandle<Self>,
3576 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3577 _: Arc<Client>,
3578 mut cx: AsyncAppContext,
3579 ) -> Result<()> {
3580 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3581 let collaborator = envelope
3582 .payload
3583 .collaborator
3584 .take()
3585 .ok_or_else(|| anyhow!("empty collaborator"))?;
3586
3587 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3588 this.update(&mut cx, |this, cx| {
3589 this.collaborators
3590 .insert(collaborator.peer_id, collaborator);
3591 cx.notify();
3592 });
3593
3594 Ok(())
3595 }
3596
3597 async fn handle_remove_collaborator(
3598 this: ModelHandle<Self>,
3599 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3600 _: Arc<Client>,
3601 mut cx: AsyncAppContext,
3602 ) -> Result<()> {
3603 this.update(&mut cx, |this, cx| {
3604 let peer_id = PeerId(envelope.payload.peer_id);
3605 let replica_id = this
3606 .collaborators
3607 .remove(&peer_id)
3608 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3609 .replica_id;
3610 for (_, buffer) in &this.opened_buffers {
3611 if let Some(buffer) = buffer.upgrade(cx) {
3612 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3613 }
3614 }
3615 cx.emit(Event::CollaboratorLeft(peer_id));
3616 cx.notify();
3617 Ok(())
3618 })
3619 }
3620
3621 async fn handle_register_worktree(
3622 this: ModelHandle<Self>,
3623 envelope: TypedEnvelope<proto::RegisterWorktree>,
3624 client: Arc<Client>,
3625 mut cx: AsyncAppContext,
3626 ) -> Result<()> {
3627 this.update(&mut cx, |this, cx| {
3628 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3629 let replica_id = this.replica_id();
3630 let worktree = proto::Worktree {
3631 id: envelope.payload.worktree_id,
3632 root_name: envelope.payload.root_name,
3633 entries: Default::default(),
3634 diagnostic_summaries: Default::default(),
3635 visible: envelope.payload.visible,
3636 };
3637 let (worktree, load_task) =
3638 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3639 this.add_worktree(&worktree, cx);
3640 load_task.detach();
3641 Ok(())
3642 })
3643 }
3644
3645 async fn handle_unregister_worktree(
3646 this: ModelHandle<Self>,
3647 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3648 _: Arc<Client>,
3649 mut cx: AsyncAppContext,
3650 ) -> Result<()> {
3651 this.update(&mut cx, |this, cx| {
3652 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3653 this.remove_worktree(worktree_id, cx);
3654 Ok(())
3655 })
3656 }
3657
3658 async fn handle_update_worktree(
3659 this: ModelHandle<Self>,
3660 envelope: TypedEnvelope<proto::UpdateWorktree>,
3661 _: Arc<Client>,
3662 mut cx: AsyncAppContext,
3663 ) -> Result<()> {
3664 this.update(&mut cx, |this, cx| {
3665 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3666 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3667 worktree.update(cx, |worktree, _| {
3668 let worktree = worktree.as_remote_mut().unwrap();
3669 worktree.update_from_remote(envelope)
3670 })?;
3671 }
3672 Ok(())
3673 })
3674 }
3675
3676 async fn handle_update_diagnostic_summary(
3677 this: ModelHandle<Self>,
3678 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3679 _: Arc<Client>,
3680 mut cx: AsyncAppContext,
3681 ) -> Result<()> {
3682 this.update(&mut cx, |this, cx| {
3683 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3684 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3685 if let Some(summary) = envelope.payload.summary {
3686 let project_path = ProjectPath {
3687 worktree_id,
3688 path: Path::new(&summary.path).into(),
3689 };
3690 worktree.update(cx, |worktree, _| {
3691 worktree
3692 .as_remote_mut()
3693 .unwrap()
3694 .update_diagnostic_summary(project_path.path.clone(), &summary);
3695 });
3696 cx.emit(Event::DiagnosticsUpdated(project_path));
3697 }
3698 }
3699 Ok(())
3700 })
3701 }
3702
3703 async fn handle_start_language_server(
3704 this: ModelHandle<Self>,
3705 envelope: TypedEnvelope<proto::StartLanguageServer>,
3706 _: Arc<Client>,
3707 mut cx: AsyncAppContext,
3708 ) -> Result<()> {
3709 let server = envelope
3710 .payload
3711 .server
3712 .ok_or_else(|| anyhow!("invalid server"))?;
3713 this.update(&mut cx, |this, cx| {
3714 this.language_server_statuses.insert(
3715 server.id as usize,
3716 LanguageServerStatus {
3717 name: server.name,
3718 pending_work: Default::default(),
3719 pending_diagnostic_updates: 0,
3720 },
3721 );
3722 cx.notify();
3723 });
3724 Ok(())
3725 }
3726
3727 async fn handle_update_language_server(
3728 this: ModelHandle<Self>,
3729 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3730 _: Arc<Client>,
3731 mut cx: AsyncAppContext,
3732 ) -> Result<()> {
3733 let language_server_id = envelope.payload.language_server_id as usize;
3734 match envelope
3735 .payload
3736 .variant
3737 .ok_or_else(|| anyhow!("invalid variant"))?
3738 {
3739 proto::update_language_server::Variant::WorkStart(payload) => {
3740 this.update(&mut cx, |this, cx| {
3741 this.on_lsp_work_start(language_server_id, payload.token, cx);
3742 })
3743 }
3744 proto::update_language_server::Variant::WorkProgress(payload) => {
3745 this.update(&mut cx, |this, cx| {
3746 this.on_lsp_work_progress(
3747 language_server_id,
3748 payload.token,
3749 LanguageServerProgress {
3750 message: payload.message,
3751 percentage: payload.percentage.map(|p| p as usize),
3752 last_update_at: Instant::now(),
3753 },
3754 cx,
3755 );
3756 })
3757 }
3758 proto::update_language_server::Variant::WorkEnd(payload) => {
3759 this.update(&mut cx, |this, cx| {
3760 this.on_lsp_work_end(language_server_id, payload.token, cx);
3761 })
3762 }
3763 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3764 this.update(&mut cx, |this, cx| {
3765 this.disk_based_diagnostics_started(cx);
3766 })
3767 }
3768 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3769 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3770 }
3771 }
3772
3773 Ok(())
3774 }
3775
3776 async fn handle_update_buffer(
3777 this: ModelHandle<Self>,
3778 envelope: TypedEnvelope<proto::UpdateBuffer>,
3779 _: Arc<Client>,
3780 mut cx: AsyncAppContext,
3781 ) -> Result<()> {
3782 this.update(&mut cx, |this, cx| {
3783 let payload = envelope.payload.clone();
3784 let buffer_id = payload.buffer_id;
3785 let ops = payload
3786 .operations
3787 .into_iter()
3788 .map(language::proto::deserialize_operation)
3789 .collect::<Result<Vec<_>, _>>()?;
3790 match this.opened_buffers.entry(buffer_id) {
3791 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3792 OpenBuffer::Strong(buffer) => {
3793 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3794 }
3795 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3796 OpenBuffer::Weak(_) => {}
3797 },
3798 hash_map::Entry::Vacant(e) => {
3799 e.insert(OpenBuffer::Loading(ops));
3800 }
3801 }
3802 Ok(())
3803 })
3804 }
3805
3806 async fn handle_update_buffer_file(
3807 this: ModelHandle<Self>,
3808 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3809 _: Arc<Client>,
3810 mut cx: AsyncAppContext,
3811 ) -> Result<()> {
3812 this.update(&mut cx, |this, cx| {
3813 let payload = envelope.payload.clone();
3814 let buffer_id = payload.buffer_id;
3815 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3816 let worktree = this
3817 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3818 .ok_or_else(|| anyhow!("no such worktree"))?;
3819 let file = File::from_proto(file, worktree.clone(), cx)?;
3820 let buffer = this
3821 .opened_buffers
3822 .get_mut(&buffer_id)
3823 .and_then(|b| b.upgrade(cx))
3824 .ok_or_else(|| anyhow!("no such buffer"))?;
3825 buffer.update(cx, |buffer, cx| {
3826 buffer.file_updated(Box::new(file), cx).detach();
3827 });
3828 Ok(())
3829 })
3830 }
3831
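    /// Saves a buffer on behalf of a remote collaborator: waits until the local
    /// replica has caught up to the requested version, saves the buffer, and
    /// reports the saved version and mtime back to the requester.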
3832 async fn handle_save_buffer(
3833 this: ModelHandle<Self>,
3834 envelope: TypedEnvelope<proto::SaveBuffer>,
3835 _: Arc<Client>,
3836 mut cx: AsyncAppContext,
3837 ) -> Result<proto::BufferSaved> {
3838 let buffer_id = envelope.payload.buffer_id;
3839 let requested_version = deserialize_version(envelope.payload.version);
3840
3841 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3842 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3843 let buffer = this
3844 .opened_buffers
3845 .get(&buffer_id)
3846 .map(|buffer| buffer.upgrade(cx).unwrap())
3847 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3848 Ok::<_, anyhow::Error>((project_id, buffer))
3849 })?;
3850 buffer
3851 .update(&mut cx, |buffer, _| {
3852 buffer.wait_for_version(requested_version)
3853 })
3854 .await;
3855
3856 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3857 Ok(proto::BufferSaved {
3858 project_id,
3859 buffer_id,
3860 version: serialize_version(&saved_version),
3861 mtime: Some(mtime.into()),
3862 })
3863 }
3864
3865 async fn handle_reload_buffers(
3866 this: ModelHandle<Self>,
3867 envelope: TypedEnvelope<proto::ReloadBuffers>,
3868 _: Arc<Client>,
3869 mut cx: AsyncAppContext,
3870 ) -> Result<proto::ReloadBuffersResponse> {
3871 let sender_id = envelope.original_sender_id()?;
3872 let reload = this.update(&mut cx, |this, cx| {
3873 let mut buffers = HashSet::default();
3874 for buffer_id in &envelope.payload.buffer_ids {
3875 buffers.insert(
3876 this.opened_buffers
3877 .get(buffer_id)
3878 .map(|buffer| buffer.upgrade(cx).unwrap())
3879 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3880 );
3881 }
3882 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3883 })?;
3884
3885 let project_transaction = reload.await?;
3886 let project_transaction = this.update(&mut cx, |this, cx| {
3887 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3888 });
3889 Ok(proto::ReloadBuffersResponse {
3890 transaction: Some(project_transaction),
3891 })
3892 }
3893
3894 async fn handle_format_buffers(
3895 this: ModelHandle<Self>,
3896 envelope: TypedEnvelope<proto::FormatBuffers>,
3897 _: Arc<Client>,
3898 mut cx: AsyncAppContext,
3899 ) -> Result<proto::FormatBuffersResponse> {
3900 let sender_id = envelope.original_sender_id()?;
3901 let format = this.update(&mut cx, |this, cx| {
3902 let mut buffers = HashSet::default();
3903 for buffer_id in &envelope.payload.buffer_ids {
3904 buffers.insert(
3905 this.opened_buffers
3906 .get(buffer_id)
3907 .map(|buffer| buffer.upgrade(cx).unwrap())
3908 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3909 );
3910 }
3911 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3912 })?;
3913
3914 let project_transaction = format.await?;
3915 let project_transaction = this.update(&mut cx, |this, cx| {
3916 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3917 });
3918 Ok(proto::FormatBuffersResponse {
3919 transaction: Some(project_transaction),
3920 })
3921 }
3922
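    /// Serves a completion request from a peer: waits for the buffer to reach the
    /// requested version, queries completions, and returns them serialized along
    /// with the buffer version they apply to.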
3923 async fn handle_get_completions(
3924 this: ModelHandle<Self>,
3925 envelope: TypedEnvelope<proto::GetCompletions>,
3926 _: Arc<Client>,
3927 mut cx: AsyncAppContext,
3928 ) -> Result<proto::GetCompletionsResponse> {
3929 let position = envelope
3930 .payload
3931 .position
3932 .and_then(language::proto::deserialize_anchor)
3933 .ok_or_else(|| anyhow!("invalid position"))?;
3934 let version = deserialize_version(envelope.payload.version);
3935 let buffer = this.read_with(&cx, |this, cx| {
3936 this.opened_buffers
3937 .get(&envelope.payload.buffer_id)
3938 .map(|buffer| buffer.upgrade(cx).unwrap())
3939 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3940 })?;
3941 buffer
3942 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3943 .await;
3944 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3945 let completions = this
3946 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3947 .await?;
3948
3949 Ok(proto::GetCompletionsResponse {
3950 completions: completions
3951 .iter()
3952 .map(language::proto::serialize_completion)
3953 .collect(),
3954 version: serialize_version(&version),
3955 })
3956 }
3957
3958 async fn handle_apply_additional_edits_for_completion(
3959 this: ModelHandle<Self>,
3960 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3961 _: Arc<Client>,
3962 mut cx: AsyncAppContext,
3963 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3964 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3965 let buffer = this
3966 .opened_buffers
3967 .get(&envelope.payload.buffer_id)
3968 .map(|buffer| buffer.upgrade(cx).unwrap())
3969 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3970 let language = buffer.read(cx).language();
3971 let completion = language::proto::deserialize_completion(
3972 envelope
3973 .payload
3974 .completion
3975 .ok_or_else(|| anyhow!("invalid completion"))?,
3976 language,
3977 )?;
3978 Ok::<_, anyhow::Error>(
3979 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3980 )
3981 })?;
3982
3983 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3984 transaction: apply_additional_edits
3985 .await?
3986 .as_ref()
3987 .map(language::proto::serialize_transaction),
3988 })
3989 }
3990
3991 async fn handle_get_code_actions(
3992 this: ModelHandle<Self>,
3993 envelope: TypedEnvelope<proto::GetCodeActions>,
3994 _: Arc<Client>,
3995 mut cx: AsyncAppContext,
3996 ) -> Result<proto::GetCodeActionsResponse> {
3997 let start = envelope
3998 .payload
3999 .start
4000 .and_then(language::proto::deserialize_anchor)
4001 .ok_or_else(|| anyhow!("invalid start"))?;
4002 let end = envelope
4003 .payload
4004 .end
4005 .and_then(language::proto::deserialize_anchor)
4006 .ok_or_else(|| anyhow!("invalid end"))?;
4007 let buffer = this.update(&mut cx, |this, cx| {
4008 this.opened_buffers
4009 .get(&envelope.payload.buffer_id)
4010 .map(|buffer| buffer.upgrade(cx).unwrap())
4011 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4012 })?;
4013 buffer
4014 .update(&mut cx, |buffer, _| {
4015 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4016 })
4017 .await;
4018
4019 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4020 let code_actions = this.update(&mut cx, |this, cx| {
4021 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4022 })?;
4023
4024 Ok(proto::GetCodeActionsResponse {
4025 actions: code_actions
4026 .await?
4027 .iter()
4028 .map(language::proto::serialize_code_action)
4029 .collect(),
4030 version: serialize_version(&version),
4031 })
4032 }
4033
4034 async fn handle_apply_code_action(
4035 this: ModelHandle<Self>,
4036 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4037 _: Arc<Client>,
4038 mut cx: AsyncAppContext,
4039 ) -> Result<proto::ApplyCodeActionResponse> {
4040 let sender_id = envelope.original_sender_id()?;
4041 let action = language::proto::deserialize_code_action(
4042 envelope
4043 .payload
4044 .action
4045 .ok_or_else(|| anyhow!("invalid action"))?,
4046 )?;
4047 let apply_code_action = this.update(&mut cx, |this, cx| {
4048 let buffer = this
4049 .opened_buffers
4050 .get(&envelope.payload.buffer_id)
4051 .map(|buffer| buffer.upgrade(cx).unwrap())
4052 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4053 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4054 })?;
4055
4056 let project_transaction = apply_code_action.await?;
4057 let project_transaction = this.update(&mut cx, |this, cx| {
4058 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4059 });
4060 Ok(proto::ApplyCodeActionResponse {
4061 transaction: Some(project_transaction),
4062 })
4063 }
4064
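    /// Generic handler for LSP-backed requests proxied over the wire. The
    /// concrete `LspCommand` type translates between the protobuf request and
    /// response messages and the underlying LSP types.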
4065 async fn handle_lsp_command<T: LspCommand>(
4066 this: ModelHandle<Self>,
4067 envelope: TypedEnvelope<T::ProtoRequest>,
4068 _: Arc<Client>,
4069 mut cx: AsyncAppContext,
4070 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4071 where
4072 <T::LspRequest as lsp::request::Request>::Result: Send,
4073 {
4074 let sender_id = envelope.original_sender_id()?;
4075 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4076 let buffer_handle = this.read_with(&cx, |this, _| {
4077 this.opened_buffers
4078 .get(&buffer_id)
4079 .and_then(|buffer| buffer.upgrade(&cx))
4080 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4081 })?;
4082 let request = T::from_proto(
4083 envelope.payload,
4084 this.clone(),
4085 buffer_handle.clone(),
4086 cx.clone(),
4087 )
4088 .await?;
4089 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4090 let response = this
4091 .update(&mut cx, |this, cx| {
4092 this.request_lsp(buffer_handle, request, cx)
4093 })
4094 .await?;
4095 this.update(&mut cx, |this, cx| {
4096 Ok(T::response_to_proto(
4097 response,
4098 this,
4099 sender_id,
4100 &buffer_version,
4101 cx,
4102 ))
4103 })
4104 }
4105
4106 async fn handle_get_project_symbols(
4107 this: ModelHandle<Self>,
4108 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4109 _: Arc<Client>,
4110 mut cx: AsyncAppContext,
4111 ) -> Result<proto::GetProjectSymbolsResponse> {
4112 let symbols = this
4113 .update(&mut cx, |this, cx| {
4114 this.symbols(&envelope.payload.query, cx)
4115 })
4116 .await?;
4117
4118 Ok(proto::GetProjectSymbolsResponse {
4119 symbols: symbols.iter().map(serialize_symbol).collect(),
4120 })
4121 }
4122
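    /// Runs a project-wide search on behalf of a peer and serializes each
    /// matching buffer/range pair back as a `proto::Location`.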
4123 async fn handle_search_project(
4124 this: ModelHandle<Self>,
4125 envelope: TypedEnvelope<proto::SearchProject>,
4126 _: Arc<Client>,
4127 mut cx: AsyncAppContext,
4128 ) -> Result<proto::SearchProjectResponse> {
4129 let peer_id = envelope.original_sender_id()?;
4130 let query = SearchQuery::from_proto(envelope.payload)?;
4131 let result = this
4132 .update(&mut cx, |this, cx| this.search(query, cx))
4133 .await?;
4134
4135 this.update(&mut cx, |this, cx| {
4136 let mut locations = Vec::new();
4137 for (buffer, ranges) in result {
4138 for range in ranges {
4139 let start = serialize_anchor(&range.start);
4140 let end = serialize_anchor(&range.end);
4141 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4142 locations.push(proto::Location {
4143 buffer: Some(buffer),
4144 start: Some(start),
4145 end: Some(end),
4146 });
4147 }
4148 }
4149 Ok(proto::SearchProjectResponse { locations })
4150 })
4151 }
4152
4153 async fn handle_open_buffer_for_symbol(
4154 this: ModelHandle<Self>,
4155 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4156 _: Arc<Client>,
4157 mut cx: AsyncAppContext,
4158 ) -> Result<proto::OpenBufferForSymbolResponse> {
4159 let peer_id = envelope.original_sender_id()?;
4160 let symbol = envelope
4161 .payload
4162 .symbol
4163 .ok_or_else(|| anyhow!("invalid symbol"))?;
4164 let symbol = this.read_with(&cx, |this, _| {
4165 let symbol = this.deserialize_symbol(symbol)?;
4166 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4167 if signature == symbol.signature {
4168 Ok(symbol)
4169 } else {
4170 Err(anyhow!("invalid symbol signature"))
4171 }
4172 })?;
4173 let buffer = this
4174 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4175 .await?;
4176
4177 Ok(proto::OpenBufferForSymbolResponse {
4178 buffer: Some(this.update(&mut cx, |this, cx| {
4179 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4180 })),
4181 })
4182 }
4183
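    /// Computes a SHA-256 signature over a worktree id, a path, and the
    /// project-local nonce. `handle_open_buffer_for_symbol` uses it to verify
    /// that a symbol sent back by a peer originated from this project.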
4184 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4185 let mut hasher = Sha256::new();
4186 hasher.update(worktree_id.to_proto().to_be_bytes());
4187 hasher.update(path.to_string_lossy().as_bytes());
4188 hasher.update(self.nonce.to_be_bytes());
4189 hasher.finalize().as_slice().try_into().unwrap()
4190 }
4191
4192 async fn handle_open_buffer_by_id(
4193 this: ModelHandle<Self>,
4194 envelope: TypedEnvelope<proto::OpenBufferById>,
4195 _: Arc<Client>,
4196 mut cx: AsyncAppContext,
4197 ) -> Result<proto::OpenBufferResponse> {
4198 let peer_id = envelope.original_sender_id()?;
4199 let buffer = this
4200 .update(&mut cx, |this, cx| {
4201 this.open_buffer_by_id(envelope.payload.id, cx)
4202 })
4203 .await?;
4204 this.update(&mut cx, |this, cx| {
4205 Ok(proto::OpenBufferResponse {
4206 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4207 })
4208 })
4209 }
4210
4211 async fn handle_open_buffer_by_path(
4212 this: ModelHandle<Self>,
4213 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4214 _: Arc<Client>,
4215 mut cx: AsyncAppContext,
4216 ) -> Result<proto::OpenBufferResponse> {
4217 let peer_id = envelope.original_sender_id()?;
4218 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4219 let open_buffer = this.update(&mut cx, |this, cx| {
4220 this.open_buffer(
4221 ProjectPath {
4222 worktree_id,
4223 path: PathBuf::from(envelope.payload.path).into(),
4224 },
4225 cx,
4226 )
4227 });
4228
4229 let buffer = open_buffer.await?;
4230 this.update(&mut cx, |this, cx| {
4231 Ok(proto::OpenBufferResponse {
4232 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4233 })
4234 })
4235 }
4236
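    /// Converts a `ProjectTransaction` into its protobuf form for a given peer,
    /// serializing each affected buffer (full state the first time it is shared
    /// with that peer, just its id afterwards) alongside its transaction.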
4237 fn serialize_project_transaction_for_peer(
4238 &mut self,
4239 project_transaction: ProjectTransaction,
4240 peer_id: PeerId,
4241 cx: &AppContext,
4242 ) -> proto::ProjectTransaction {
4243 let mut serialized_transaction = proto::ProjectTransaction {
4244 buffers: Default::default(),
4245 transactions: Default::default(),
4246 };
4247 for (buffer, transaction) in project_transaction.0 {
4248 serialized_transaction
4249 .buffers
4250 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4251 serialized_transaction
4252 .transactions
4253 .push(language::proto::serialize_transaction(&transaction));
4254 }
4255 serialized_transaction
4256 }
4257
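    /// Reconstructs a `ProjectTransaction` received over the wire: resolves each
    /// serialized buffer, waits for the edits referenced by its transaction to
    /// arrive, and optionally pushes the transaction onto the buffer's history.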
4258 fn deserialize_project_transaction(
4259 &mut self,
4260 message: proto::ProjectTransaction,
4261 push_to_history: bool,
4262 cx: &mut ModelContext<Self>,
4263 ) -> Task<Result<ProjectTransaction>> {
4264 cx.spawn(|this, mut cx| async move {
4265 let mut project_transaction = ProjectTransaction::default();
4266 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4267 let buffer = this
4268 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4269 .await?;
4270 let transaction = language::proto::deserialize_transaction(transaction)?;
4271 project_transaction.0.insert(buffer, transaction);
4272 }
4273
4274 for (buffer, transaction) in &project_transaction.0 {
4275 buffer
4276 .update(&mut cx, |buffer, _| {
4277 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4278 })
4279 .await;
4280
4281 if push_to_history {
4282 buffer.update(&mut cx, |buffer, _| {
4283 buffer.push_transaction(transaction.clone(), Instant::now());
4284 });
4285 }
4286 }
4287
4288 Ok(project_transaction)
4289 })
4290 }
4291
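    /// Serializes a buffer for a peer. The first time a buffer is shared with a
    /// given peer its full state is sent; subsequent references send only the
    /// buffer id.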
4292 fn serialize_buffer_for_peer(
4293 &mut self,
4294 buffer: &ModelHandle<Buffer>,
4295 peer_id: PeerId,
4296 cx: &AppContext,
4297 ) -> proto::Buffer {
4298 let buffer_id = buffer.read(cx).remote_id();
4299 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4300 if shared_buffers.insert(buffer_id) {
4301 proto::Buffer {
4302 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4303 }
4304 } else {
4305 proto::Buffer {
4306 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4307 }
4308 }
4309 }
4310
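    /// Resolves a serialized buffer. An id-only variant waits until the
    /// corresponding buffer has been opened locally, while a full-state variant
    /// constructs a new buffer, registers it, and notifies any waiters.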
4311 fn deserialize_buffer(
4312 &mut self,
4313 buffer: proto::Buffer,
4314 cx: &mut ModelContext<Self>,
4315 ) -> Task<Result<ModelHandle<Buffer>>> {
4316 let replica_id = self.replica_id();
4317
4318 let opened_buffer_tx = self.opened_buffer.0.clone();
4319 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4320 cx.spawn(|this, mut cx| async move {
4321 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4322 proto::buffer::Variant::Id(id) => {
4323 let buffer = loop {
4324 let buffer = this.read_with(&cx, |this, cx| {
4325 this.opened_buffers
4326 .get(&id)
4327 .and_then(|buffer| buffer.upgrade(cx))
4328 });
4329 if let Some(buffer) = buffer {
4330 break buffer;
4331 }
4332 opened_buffer_rx
4333 .next()
4334 .await
4335 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4336 };
4337 Ok(buffer)
4338 }
4339 proto::buffer::Variant::State(mut buffer) => {
4340 let mut buffer_worktree = None;
4341 let mut buffer_file = None;
4342 if let Some(file) = buffer.file.take() {
4343 this.read_with(&cx, |this, cx| {
4344 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4345 let worktree =
4346 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4347 anyhow!("no worktree found for id {}", file.worktree_id)
4348 })?;
4349 buffer_file =
4350 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4351 as Box<dyn language::File>);
4352 buffer_worktree = Some(worktree);
4353 Ok::<_, anyhow::Error>(())
4354 })?;
4355 }
4356
4357 let buffer = cx.add_model(|cx| {
4358 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4359 });
4360
4361 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4362
4363 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4364 Ok(buffer)
4365 }
4366 }
4367 })
4368 }
4369
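    /// Converts a protobuf symbol into a local `Symbol`, selecting a language
    /// from the path to compute a display label and checking that the signature
    /// has the expected length.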
4370 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4371 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4372 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4373 let start = serialized_symbol
4374 .start
4375 .ok_or_else(|| anyhow!("invalid start"))?;
4376 let end = serialized_symbol
4377 .end
4378 .ok_or_else(|| anyhow!("invalid end"))?;
4379        let kind = unsafe { mem::transmute(serialized_symbol.kind) }; // assumes the peer sent a valid `SymbolKind` value
4380 let path = PathBuf::from(serialized_symbol.path);
4381 let language = self.languages.select_language(&path);
4382 Ok(Symbol {
4383 source_worktree_id,
4384 worktree_id,
4385 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4386 label: language
4387 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4388 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4389 name: serialized_symbol.name,
4390 path,
4391 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4392 kind,
4393 signature: serialized_symbol
4394 .signature
4395 .try_into()
4396 .map_err(|_| anyhow!("invalid signature"))?,
4397 })
4398 }
4399
4400 async fn handle_buffer_saved(
4401 this: ModelHandle<Self>,
4402 envelope: TypedEnvelope<proto::BufferSaved>,
4403 _: Arc<Client>,
4404 mut cx: AsyncAppContext,
4405 ) -> Result<()> {
4406 let version = deserialize_version(envelope.payload.version);
4407 let mtime = envelope
4408 .payload
4409 .mtime
4410 .ok_or_else(|| anyhow!("missing mtime"))?
4411 .into();
4412
4413 this.update(&mut cx, |this, cx| {
4414 let buffer = this
4415 .opened_buffers
4416 .get(&envelope.payload.buffer_id)
4417 .and_then(|buffer| buffer.upgrade(cx));
4418 if let Some(buffer) = buffer {
4419 buffer.update(cx, |buffer, cx| {
4420 buffer.did_save(version, mtime, None, cx);
4421 });
4422 }
4423 Ok(())
4424 })
4425 }
4426
4427 async fn handle_buffer_reloaded(
4428 this: ModelHandle<Self>,
4429 envelope: TypedEnvelope<proto::BufferReloaded>,
4430 _: Arc<Client>,
4431 mut cx: AsyncAppContext,
4432 ) -> Result<()> {
4433 let payload = envelope.payload.clone();
4434 let version = deserialize_version(payload.version);
4435 let mtime = payload
4436 .mtime
4437 .ok_or_else(|| anyhow!("missing mtime"))?
4438 .into();
4439 this.update(&mut cx, |this, cx| {
4440 let buffer = this
4441 .opened_buffers
4442 .get(&payload.buffer_id)
4443 .and_then(|buffer| buffer.upgrade(cx));
4444 if let Some(buffer) = buffer {
4445 buffer.update(cx, |buffer, cx| {
4446 buffer.did_reload(version, mtime, cx);
4447 });
4448 }
4449 Ok(())
4450 })
4451 }
4452
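    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches. The matching runs on the
    /// background executor and can be cancelled via `cancel_flag`.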
4453 pub fn match_paths<'a>(
4454 &self,
4455 query: &'a str,
4456 include_ignored: bool,
4457 smart_case: bool,
4458 max_results: usize,
4459 cancel_flag: &'a AtomicBool,
4460 cx: &AppContext,
4461 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4462 let worktrees = self
4463 .worktrees(cx)
4464 .filter(|worktree| worktree.read(cx).is_visible())
4465 .collect::<Vec<_>>();
4466 let include_root_name = worktrees.len() > 1;
4467 let candidate_sets = worktrees
4468 .into_iter()
4469 .map(|worktree| CandidateSet {
4470 snapshot: worktree.read(cx).snapshot(),
4471 include_ignored,
4472 include_root_name,
4473 })
4474 .collect::<Vec<_>>();
4475
4476 let background = cx.background().clone();
4477 async move {
4478 fuzzy::match_paths(
4479 candidate_sets.as_slice(),
4480 query,
4481 smart_case,
4482 max_results,
4483 cancel_flag,
4484 background,
4485 )
4486 .await
4487 }
4488 }
4489
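    /// Converts LSP text edits into anchored buffer edits against the snapshot
    /// corresponding to `version`. Adjacent or newline-separated edits are
    /// merged, and multiline replacements are diffed line by line so that anchors
    /// in unchanged regions are preserved.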
4490 fn edits_from_lsp(
4491 &mut self,
4492 buffer: &ModelHandle<Buffer>,
4493 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4494 version: Option<i32>,
4495 cx: &mut ModelContext<Self>,
4496 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4497 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4498 cx.background().spawn(async move {
4499 let snapshot = snapshot?;
4500 let mut lsp_edits = lsp_edits
4501 .into_iter()
4502 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4503 .peekable();
4504
4505 let mut edits = Vec::new();
4506 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4507 // Combine any LSP edits that are adjacent.
4508 //
4509 // Also, combine LSP edits that are separated from each other by only
4510 // a newline. This is important because for some code actions,
4511                // rust-analyzer rewrites the entire buffer via a series of edits that
4512 // are separated by unchanged newline characters.
4513 //
4514 // In order for the diffing logic below to work properly, any edits that
4515 // cancel each other out must be combined into one.
4516 while let Some((next_range, next_text)) = lsp_edits.peek() {
4517 if next_range.start > range.end {
4518 if next_range.start.row > range.end.row + 1
4519 || next_range.start.column > 0
4520 || snapshot.clip_point_utf16(
4521 PointUtf16::new(range.end.row, u32::MAX),
4522 Bias::Left,
4523 ) > range.end
4524 {
4525 break;
4526 }
4527 new_text.push('\n');
4528 }
4529 range.end = next_range.end;
4530 new_text.push_str(&next_text);
4531 lsp_edits.next();
4532 }
4533
4534 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4535 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4536 {
4537 return Err(anyhow!("invalid edits received from language server"));
4538 }
4539
4540 // For multiline edits, perform a diff of the old and new text so that
4541 // we can identify the changes more precisely, preserving the locations
4542 // of any anchors positioned in the unchanged regions.
4543 if range.end.row > range.start.row {
4544 let mut offset = range.start.to_offset(&snapshot);
4545 let old_text = snapshot.text_for_range(range).collect::<String>();
4546
4547 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4548 let mut moved_since_edit = true;
4549 for change in diff.iter_all_changes() {
4550 let tag = change.tag();
4551 let value = change.value();
4552 match tag {
4553 ChangeTag::Equal => {
4554 offset += value.len();
4555 moved_since_edit = true;
4556 }
4557 ChangeTag::Delete => {
4558 let start = snapshot.anchor_after(offset);
4559 let end = snapshot.anchor_before(offset + value.len());
4560 if moved_since_edit {
4561 edits.push((start..end, String::new()));
4562 } else {
4563 edits.last_mut().unwrap().0.end = end;
4564 }
4565 offset += value.len();
4566 moved_since_edit = false;
4567 }
4568 ChangeTag::Insert => {
4569 if moved_since_edit {
4570 let anchor = snapshot.anchor_after(offset);
4571 edits.push((anchor.clone()..anchor, value.to_string()));
4572 } else {
4573 edits.last_mut().unwrap().1.push_str(value);
4574 }
4575 moved_since_edit = false;
4576 }
4577 }
4578 }
4579 } else if range.end == range.start {
4580 let anchor = snapshot.anchor_after(range.start);
4581 edits.push((anchor.clone()..anchor, new_text));
4582 } else {
4583 let edit_start = snapshot.anchor_after(range.start);
4584 let edit_end = snapshot.anchor_before(range.end);
4585 edits.push((edit_start..edit_end, new_text));
4586 }
4587 }
4588
4589 Ok(edits)
4590 })
4591 }
4592
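    /// Returns the buffer snapshot that was current at the given LSP document
    /// version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    /// old. With no version, the buffer's current text snapshot is returned.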
4593 fn buffer_snapshot_for_lsp_version(
4594 &mut self,
4595 buffer: &ModelHandle<Buffer>,
4596 version: Option<i32>,
4597 cx: &AppContext,
4598 ) -> Result<TextBufferSnapshot> {
4599 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4600
4601 if let Some(version) = version {
4602 let buffer_id = buffer.read(cx).remote_id();
4603 let snapshots = self
4604 .buffer_snapshots
4605 .get_mut(&buffer_id)
4606 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4607 let mut found_snapshot = None;
4608 snapshots.retain(|(snapshot_version, snapshot)| {
4609 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4610 false
4611 } else {
4612 if *snapshot_version == version {
4613 found_snapshot = Some(snapshot.clone());
4614 }
4615 true
4616 }
4617 });
4618
4619 found_snapshot.ok_or_else(|| {
4620 anyhow!(
4621 "snapshot not found for buffer {} at version {}",
4622 buffer_id,
4623 version
4624 )
4625 })
4626 } else {
4627            Ok(buffer.read(cx).text_snapshot())
4628 }
4629 }
4630
4631 fn language_server_for_buffer(
4632 &self,
4633 buffer: &Buffer,
4634 cx: &AppContext,
4635 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4636 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4637 let worktree_id = file.worktree_id(cx);
4638 self.language_servers
4639 .get(&(worktree_id, language.lsp_adapter()?.name()))
4640 } else {
4641 None
4642 }
4643 }
4644}
4645
4646impl WorktreeHandle {
4647 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4648 match self {
4649 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4650 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4651 }
4652 }
4653}
4654
4655impl OpenBuffer {
4656 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4657 match self {
4658 OpenBuffer::Strong(handle) => Some(handle.clone()),
4659 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4660 OpenBuffer::Loading(_) => None,
4661 }
4662 }
4663}
4664
4665struct CandidateSet {
4666 snapshot: Snapshot,
4667 include_ignored: bool,
4668 include_root_name: bool,
4669}
4670
4671impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4672 type Candidates = CandidateSetIter<'a>;
4673
4674 fn id(&self) -> usize {
4675 self.snapshot.id().to_usize()
4676 }
4677
4678 fn len(&self) -> usize {
4679 if self.include_ignored {
4680 self.snapshot.file_count()
4681 } else {
4682 self.snapshot.visible_file_count()
4683 }
4684 }
4685
4686 fn prefix(&self) -> Arc<str> {
4687 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4688 self.snapshot.root_name().into()
4689 } else if self.include_root_name {
4690 format!("{}/", self.snapshot.root_name()).into()
4691 } else {
4692 "".into()
4693 }
4694 }
4695
4696 fn candidates(&'a self, start: usize) -> Self::Candidates {
4697 CandidateSetIter {
4698 traversal: self.snapshot.files(self.include_ignored, start),
4699 }
4700 }
4701}
4702
4703struct CandidateSetIter<'a> {
4704 traversal: Traversal<'a>,
4705}
4706
4707impl<'a> Iterator for CandidateSetIter<'a> {
4708 type Item = PathMatchCandidate<'a>;
4709
4710 fn next(&mut self) -> Option<Self::Item> {
4711 self.traversal.next().map(|entry| {
4712 if let EntryKind::File(char_bag) = entry.kind {
4713 PathMatchCandidate {
4714 path: &entry.path,
4715 char_bag,
4716 }
4717 } else {
4718 unreachable!()
4719 }
4720 })
4721 }
4722}
4723
4724impl Entity for Project {
4725 type Event = Event;
4726
4727 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4728 match &self.client_state {
4729 ProjectClientState::Local { remote_id_rx, .. } => {
4730 if let Some(project_id) = *remote_id_rx.borrow() {
4731 self.client
4732 .send(proto::UnregisterProject { project_id })
4733 .log_err();
4734 }
4735 }
4736 ProjectClientState::Remote { remote_id, .. } => {
4737 self.client
4738 .send(proto::LeaveProject {
4739 project_id: *remote_id,
4740 })
4741 .log_err();
4742 }
4743 }
4744 }
4745
4746 fn app_will_quit(
4747 &mut self,
4748 _: &mut MutableAppContext,
4749 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4750 let shutdown_futures = self
4751 .language_servers
4752 .drain()
4753 .filter_map(|(_, (_, server))| server.shutdown())
4754 .collect::<Vec<_>>();
4755 Some(
4756 async move {
4757 futures::future::join_all(shutdown_futures).await;
4758 }
4759 .boxed(),
4760 )
4761 }
4762}
4763
4764impl Collaborator {
4765 fn from_proto(
4766 message: proto::Collaborator,
4767 user_store: &ModelHandle<UserStore>,
4768 cx: &mut AsyncAppContext,
4769 ) -> impl Future<Output = Result<Self>> {
4770 let user = user_store.update(cx, |user_store, cx| {
4771 user_store.fetch_user(message.user_id, cx)
4772 });
4773
4774 async move {
4775 Ok(Self {
4776 peer_id: PeerId(message.peer_id),
4777 user: user.await?,
4778 replica_id: message.replica_id as ReplicaId,
4779 })
4780 }
4781 }
4782}
4783
4784impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4785 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4786 Self {
4787 worktree_id,
4788 path: path.as_ref().into(),
4789 }
4790 }
4791}
4792
4793impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4794 fn from(options: lsp::CreateFileOptions) -> Self {
4795 Self {
4796 overwrite: options.overwrite.unwrap_or(false),
4797 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4798 }
4799 }
4800}
4801
4802impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4803 fn from(options: lsp::RenameFileOptions) -> Self {
4804 Self {
4805 overwrite: options.overwrite.unwrap_or(false),
4806 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4807 }
4808 }
4809}
4810
4811impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4812 fn from(options: lsp::DeleteFileOptions) -> Self {
4813 Self {
4814 recursive: options.recursive.unwrap_or(false),
4815 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4816 }
4817 }
4818}
4819
4820fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4821 proto::Symbol {
4822 source_worktree_id: symbol.source_worktree_id.to_proto(),
4823 worktree_id: symbol.worktree_id.to_proto(),
4824 language_server_name: symbol.language_server_name.0.to_string(),
4825 name: symbol.name.clone(),
4826 kind: unsafe { mem::transmute(symbol.kind) },
4827 path: symbol.path.to_string_lossy().to_string(),
4828 start: Some(proto::Point {
4829 row: symbol.range.start.row,
4830 column: symbol.range.start.column,
4831 }),
4832 end: Some(proto::Point {
4833 row: symbol.range.end.row,
4834 column: symbol.range.end.column,
4835 }),
4836 signature: symbol.signature.to_vec(),
4837 }
4838}
4839
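/// Computes `path` relative to `base`, walking both component lists in lock
/// step and emitting `..` components where the paths diverge.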
4840fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4841 let mut path_components = path.components();
4842 let mut base_components = base.components();
4843 let mut components: Vec<Component> = Vec::new();
4844 loop {
4845 match (path_components.next(), base_components.next()) {
4846 (None, None) => break,
4847 (Some(a), None) => {
4848 components.push(a);
4849 components.extend(path_components.by_ref());
4850 break;
4851 }
4852 (None, _) => components.push(Component::ParentDir),
4853 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4854 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4855 (Some(a), Some(_)) => {
4856 components.push(Component::ParentDir);
4857 for _ in base_components {
4858 components.push(Component::ParentDir);
4859 }
4860 components.push(a);
4861 components.extend(path_components.by_ref());
4862 break;
4863 }
4864 }
4865 }
4866 components.iter().map(|c| c.as_os_str()).collect()
4867}
4868
4869impl Item for Buffer {
4870 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4871 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4872 }
4873}
4874
4875#[cfg(test)]
4876mod tests {
4877 use super::{Event, *};
4878 use fs::RealFs;
4879 use futures::{future, StreamExt};
4880 use gpui::test::subscribe;
4881 use language::{
4882 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4883 ToPoint,
4884 };
4885 use lsp::Url;
4886 use serde_json::json;
4887 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4888 use unindent::Unindent as _;
4889 use util::{assert_set_eq, test::temp_tree};
4890 use worktree::WorktreeHandle as _;
4891
4892 #[gpui::test]
4893 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4894 let dir = temp_tree(json!({
4895 "root": {
4896 "apple": "",
4897 "banana": {
4898 "carrot": {
4899 "date": "",
4900 "endive": "",
4901 }
4902 },
4903 "fennel": {
4904 "grape": "",
4905 }
4906 }
4907 }));
4908
4909 let root_link_path = dir.path().join("root_link");
4910 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4911 unix::fs::symlink(
4912 &dir.path().join("root/fennel"),
4913 &dir.path().join("root/finnochio"),
4914 )
4915 .unwrap();
4916
4917 let project = Project::test(Arc::new(RealFs), cx);
4918
4919 let (tree, _) = project
4920 .update(cx, |project, cx| {
4921 project.find_or_create_local_worktree(&root_link_path, true, cx)
4922 })
4923 .await
4924 .unwrap();
4925
4926 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4927 .await;
4928 cx.read(|cx| {
4929 let tree = tree.read(cx);
4930 assert_eq!(tree.file_count(), 5);
4931 assert_eq!(
4932 tree.inode_for_path("fennel/grape"),
4933 tree.inode_for_path("finnochio/grape")
4934 );
4935 });
4936
4937 let cancel_flag = Default::default();
4938 let results = project
4939 .read_with(cx, |project, cx| {
4940 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4941 })
4942 .await;
4943 assert_eq!(
4944 results
4945 .into_iter()
4946 .map(|result| result.path)
4947 .collect::<Vec<Arc<Path>>>(),
4948 vec![
4949 PathBuf::from("banana/carrot/date").into(),
4950 PathBuf::from("banana/carrot/endive").into(),
4951 ]
4952 );
4953 }
4954
4955 #[gpui::test]
4956 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4957 cx.foreground().forbid_parking();
4958
4959 let mut rust_language = Language::new(
4960 LanguageConfig {
4961 name: "Rust".into(),
4962 path_suffixes: vec!["rs".to_string()],
4963 ..Default::default()
4964 },
4965 Some(tree_sitter_rust::language()),
4966 );
4967 let mut json_language = Language::new(
4968 LanguageConfig {
4969 name: "JSON".into(),
4970 path_suffixes: vec!["json".to_string()],
4971 ..Default::default()
4972 },
4973 None,
4974 );
4975 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4976 name: "the-rust-language-server",
4977 capabilities: lsp::ServerCapabilities {
4978 completion_provider: Some(lsp::CompletionOptions {
4979 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4980 ..Default::default()
4981 }),
4982 ..Default::default()
4983 },
4984 ..Default::default()
4985 });
4986 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4987 name: "the-json-language-server",
4988 capabilities: lsp::ServerCapabilities {
4989 completion_provider: Some(lsp::CompletionOptions {
4990 trigger_characters: Some(vec![":".to_string()]),
4991 ..Default::default()
4992 }),
4993 ..Default::default()
4994 },
4995 ..Default::default()
4996 });
4997
4998 let fs = FakeFs::new(cx.background());
4999 fs.insert_tree(
5000 "/the-root",
5001 json!({
5002 "test.rs": "const A: i32 = 1;",
5003 "test2.rs": "",
5004 "Cargo.toml": "a = 1",
5005 "package.json": "{\"a\": 1}",
5006 }),
5007 )
5008 .await;
5009
5010 let project = Project::test(fs.clone(), cx);
5011 project.update(cx, |project, _| {
5012 project.languages.add(Arc::new(rust_language));
5013 project.languages.add(Arc::new(json_language));
5014 });
5015
5016 let worktree_id = project
5017 .update(cx, |project, cx| {
5018 project.find_or_create_local_worktree("/the-root", true, cx)
5019 })
5020 .await
5021 .unwrap()
5022 .0
5023 .read_with(cx, |tree, _| tree.id());
5024
5025 // Open a buffer without an associated language server.
5026 let toml_buffer = project
5027 .update(cx, |project, cx| {
5028 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5029 })
5030 .await
5031 .unwrap();
5032
5033 // Open a buffer with an associated language server.
5034 let rust_buffer = project
5035 .update(cx, |project, cx| {
5036 project.open_buffer((worktree_id, "test.rs"), cx)
5037 })
5038 .await
5039 .unwrap();
5040
5041 // A server is started up, and it is notified about Rust files.
5042 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5043 assert_eq!(
5044 fake_rust_server
5045 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5046 .await
5047 .text_document,
5048 lsp::TextDocumentItem {
5049 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5050 version: 0,
5051 text: "const A: i32 = 1;".to_string(),
5052 language_id: Default::default()
5053 }
5054 );
5055
5056 // The buffer is configured based on the language server's capabilities.
5057 rust_buffer.read_with(cx, |buffer, _| {
5058 assert_eq!(
5059 buffer.completion_triggers(),
5060 &[".".to_string(), "::".to_string()]
5061 );
5062 });
5063 toml_buffer.read_with(cx, |buffer, _| {
5064 assert!(buffer.completion_triggers().is_empty());
5065 });
5066
5067 // Edit a buffer. The changes are reported to the language server.
5068 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5069 assert_eq!(
5070 fake_rust_server
5071 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5072 .await
5073 .text_document,
5074 lsp::VersionedTextDocumentIdentifier::new(
5075 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5076 1
5077 )
5078 );
5079
5080 // Open a third buffer with a different associated language server.
5081 let json_buffer = project
5082 .update(cx, |project, cx| {
5083 project.open_buffer((worktree_id, "package.json"), cx)
5084 })
5085 .await
5086 .unwrap();
5087
5088        // A JSON language server is started and is notified only about the JSON buffer.
5089 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5090 assert_eq!(
5091 fake_json_server
5092 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5093 .await
5094 .text_document,
5095 lsp::TextDocumentItem {
5096 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5097 version: 0,
5098 text: "{\"a\": 1}".to_string(),
5099 language_id: Default::default()
5100 }
5101 );
5102
5103 // This buffer is configured based on the second language server's
5104 // capabilities.
5105 json_buffer.read_with(cx, |buffer, _| {
5106 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5107 });
5108
5109 // When opening another buffer whose language server is already running,
5110 // it is also configured based on the existing language server's capabilities.
5111 let rust_buffer2 = project
5112 .update(cx, |project, cx| {
5113 project.open_buffer((worktree_id, "test2.rs"), cx)
5114 })
5115 .await
5116 .unwrap();
5117 rust_buffer2.read_with(cx, |buffer, _| {
5118 assert_eq!(
5119 buffer.completion_triggers(),
5120 &[".".to_string(), "::".to_string()]
5121 );
5122 });
5123
5124 // Changes are reported only to servers matching the buffer's language.
5125 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5126 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5127 assert_eq!(
5128 fake_rust_server
5129 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5130 .await
5131 .text_document,
5132 lsp::VersionedTextDocumentIdentifier::new(
5133 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5134 1
5135 )
5136 );
5137
5138 // Save notifications are reported to all servers.
5139 toml_buffer
5140 .update(cx, |buffer, cx| buffer.save(cx))
5141 .await
5142 .unwrap();
5143 assert_eq!(
5144 fake_rust_server
5145 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5146 .await
5147 .text_document,
5148 lsp::TextDocumentIdentifier::new(
5149 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5150 )
5151 );
5152 assert_eq!(
5153 fake_json_server
5154 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5155 .await
5156 .text_document,
5157 lsp::TextDocumentIdentifier::new(
5158 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5159 )
5160 );
5161
5162 // Renames are reported only to servers matching the buffer's language.
5163 fs.rename(
5164 Path::new("/the-root/test2.rs"),
5165 Path::new("/the-root/test3.rs"),
5166 Default::default(),
5167 )
5168 .await
5169 .unwrap();
5170 assert_eq!(
5171 fake_rust_server
5172 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5173 .await
5174 .text_document,
5175 lsp::TextDocumentIdentifier::new(
5176 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5177 ),
5178 );
5179 assert_eq!(
5180 fake_rust_server
5181 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5182 .await
5183 .text_document,
5184 lsp::TextDocumentItem {
5185 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5186 version: 0,
5187 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5188 language_id: Default::default()
5189 },
5190 );
5191
5192 // When the rename changes the extension of the file, the buffer gets closed on the old
5193 // language server and gets opened on the new one.
5194 fs.rename(
5195 Path::new("/the-root/test3.rs"),
5196 Path::new("/the-root/test3.json"),
5197 Default::default(),
5198 )
5199 .await
5200 .unwrap();
5201 assert_eq!(
5202 fake_rust_server
5203 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5204 .await
5205 .text_document,
5206 lsp::TextDocumentIdentifier::new(
5207 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5208 ),
5209 );
5210 assert_eq!(
5211 fake_json_server
5212 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5213 .await
5214 .text_document,
5215 lsp::TextDocumentItem {
5216 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5217 version: 0,
5218 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5219 language_id: Default::default()
5220 },
5221 );
5222
5223        // The renamed file's version resets after changing language servers.
5224 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5225 assert_eq!(
5226 fake_json_server
5227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5228 .await
5229 .text_document,
5230 lsp::VersionedTextDocumentIdentifier::new(
5231 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5232 1
5233 )
5234 );
5235
5236 // Restart language servers
5237 project.update(cx, |project, cx| {
5238 project.restart_language_servers_for_buffers(
5239 vec![rust_buffer.clone(), json_buffer.clone()],
5240 cx,
5241 );
5242 });
5243
5244 let mut rust_shutdown_requests = fake_rust_server
5245 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5246 let mut json_shutdown_requests = fake_json_server
5247 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5248 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5249
5250 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5251 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5252
5253        // Ensure the Rust document is reopened in the new Rust language server.
5254 assert_eq!(
5255 fake_rust_server
5256 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5257 .await
5258 .text_document,
5259 lsp::TextDocumentItem {
5260 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5261 version: 1,
5262 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5263 language_id: Default::default()
5264 }
5265 );
5266
5267        // Ensure the JSON documents are reopened in the new JSON language server.
5268 assert_set_eq!(
5269 [
5270 fake_json_server
5271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5272 .await
5273 .text_document,
5274 fake_json_server
5275 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5276 .await
5277 .text_document,
5278 ],
5279 [
5280 lsp::TextDocumentItem {
5281 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5282 version: 0,
5283 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5284 language_id: Default::default()
5285 },
5286 lsp::TextDocumentItem {
5287 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5288 version: 1,
5289 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5290 language_id: Default::default()
5291 }
5292 ]
5293 );
5294
5295 // Close notifications are reported only to servers matching the buffer's language.
5296 cx.update(|_| drop(json_buffer));
5297 let close_message = lsp::DidCloseTextDocumentParams {
5298 text_document: lsp::TextDocumentIdentifier::new(
5299 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5300 ),
5301 };
5302 assert_eq!(
5303 fake_json_server
5304 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5305 .await,
5306 close_message,
5307 );
5308 }
5309
5310 #[gpui::test]
5311 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5312 cx.foreground().forbid_parking();
5313
5314 let progress_token = "the-progress-token";
5315 let mut language = Language::new(
5316 LanguageConfig {
5317 name: "Rust".into(),
5318 path_suffixes: vec!["rs".to_string()],
5319 ..Default::default()
5320 },
5321 Some(tree_sitter_rust::language()),
5322 );
5323 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5324 disk_based_diagnostics_progress_token: Some(progress_token),
5325 disk_based_diagnostics_sources: &["disk"],
5326 ..Default::default()
5327 });
5328
5329 let fs = FakeFs::new(cx.background());
5330 fs.insert_tree(
5331 "/dir",
5332 json!({
5333 "a.rs": "fn a() { A }",
5334 "b.rs": "const y: i32 = 1",
5335 }),
5336 )
5337 .await;
5338
5339 let project = Project::test(fs, cx);
5340 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5341
5342 let (tree, _) = project
5343 .update(cx, |project, cx| {
5344 project.find_or_create_local_worktree("/dir", true, cx)
5345 })
5346 .await
5347 .unwrap();
5348 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5349
5350 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5351 .await;
5352
5353 // Cause worktree to start the fake language server
5354 let _buffer = project
5355 .update(cx, |project, cx| {
5356 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5357 })
5358 .await
5359 .unwrap();
5360
5361 let mut events = subscribe(&project, cx);
5362
5363 let mut fake_server = fake_servers.next().await.unwrap();
5364 fake_server.start_progress(progress_token).await;
5365 assert_eq!(
5366 events.next().await.unwrap(),
5367 Event::DiskBasedDiagnosticsStarted
5368 );
5369
5370 fake_server.start_progress(progress_token).await;
5371 fake_server.end_progress(progress_token).await;
5372 fake_server.start_progress(progress_token).await;
5373
5374 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5375 lsp::PublishDiagnosticsParams {
5376 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5377 version: None,
5378 diagnostics: vec![lsp::Diagnostic {
5379 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5380 severity: Some(lsp::DiagnosticSeverity::ERROR),
5381 message: "undefined variable 'A'".to_string(),
5382 ..Default::default()
5383 }],
5384 },
5385 );
5386 assert_eq!(
5387 events.next().await.unwrap(),
5388 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5389 );
5390
5391 fake_server.end_progress(progress_token).await;
5392 fake_server.end_progress(progress_token).await;
5393 assert_eq!(
5394 events.next().await.unwrap(),
5395 Event::DiskBasedDiagnosticsUpdated
5396 );
5397 assert_eq!(
5398 events.next().await.unwrap(),
5399 Event::DiskBasedDiagnosticsFinished
5400 );
5401
5402 let buffer = project
5403 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5404 .await
5405 .unwrap();
5406
5407 buffer.read_with(cx, |buffer, _| {
5408 let snapshot = buffer.snapshot();
5409 let diagnostics = snapshot
5410 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5411 .collect::<Vec<_>>();
5412 assert_eq!(
5413 diagnostics,
5414 &[DiagnosticEntry {
5415 range: Point::new(0, 9)..Point::new(0, 10),
5416 diagnostic: Diagnostic {
5417 severity: lsp::DiagnosticSeverity::ERROR,
5418 message: "undefined variable 'A'".to_string(),
5419 group_id: 0,
5420 is_primary: true,
5421 ..Default::default()
5422 }
5423 }]
5424 )
5425 });
5426 }
5427
5428 #[gpui::test]
5429 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5430 cx.foreground().forbid_parking();
5431
5432 let mut language = Language::new(
5433 LanguageConfig {
5434 name: "Rust".into(),
5435 path_suffixes: vec!["rs".to_string()],
5436 ..Default::default()
5437 },
5438 Some(tree_sitter_rust::language()),
5439 );
5440 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5441 disk_based_diagnostics_sources: &["disk"],
5442 ..Default::default()
5443 });
5444
5445 let text = "
5446 fn a() { A }
5447 fn b() { BB }
5448 fn c() { CCC }
5449 "
5450 .unindent();
5451
5452 let fs = FakeFs::new(cx.background());
5453 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5454
5455 let project = Project::test(fs, cx);
5456 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5457
5458 let worktree_id = project
5459 .update(cx, |project, cx| {
5460 project.find_or_create_local_worktree("/dir", true, cx)
5461 })
5462 .await
5463 .unwrap()
5464 .0
5465 .read_with(cx, |tree, _| tree.id());
5466
5467 let buffer = project
5468 .update(cx, |project, cx| {
5469 project.open_buffer((worktree_id, "a.rs"), cx)
5470 })
5471 .await
5472 .unwrap();
5473
5474 let mut fake_server = fake_servers.next().await.unwrap();
5475 let open_notification = fake_server
5476 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5477 .await;
5478
5479 // Edit the buffer, moving the content down
5480 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5481 let change_notification_1 = fake_server
5482 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5483 .await;
5484 assert!(
5485 change_notification_1.text_document.version > open_notification.text_document.version
5486 );
5487
5488 // Report some diagnostics for the initial version of the buffer
5489 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5490 lsp::PublishDiagnosticsParams {
5491 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5492 version: Some(open_notification.text_document.version),
5493 diagnostics: vec![
5494 lsp::Diagnostic {
5495 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5496 severity: Some(DiagnosticSeverity::ERROR),
5497 message: "undefined variable 'A'".to_string(),
5498 source: Some("disk".to_string()),
5499 ..Default::default()
5500 },
5501 lsp::Diagnostic {
5502 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5503 severity: Some(DiagnosticSeverity::ERROR),
5504 message: "undefined variable 'BB'".to_string(),
5505 source: Some("disk".to_string()),
5506 ..Default::default()
5507 },
5508 lsp::Diagnostic {
5509 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5510 severity: Some(DiagnosticSeverity::ERROR),
5511 source: Some("disk".to_string()),
5512 message: "undefined variable 'CCC'".to_string(),
5513 ..Default::default()
5514 },
5515 ],
5516 },
5517 );
5518
5519 // The diagnostics have moved down since they were created.
5520 buffer.next_notification(cx).await;
5521 buffer.read_with(cx, |buffer, _| {
5522 assert_eq!(
5523 buffer
5524 .snapshot()
5525 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5526 .collect::<Vec<_>>(),
5527 &[
5528 DiagnosticEntry {
5529 range: Point::new(3, 9)..Point::new(3, 11),
5530 diagnostic: Diagnostic {
5531 severity: DiagnosticSeverity::ERROR,
5532 message: "undefined variable 'BB'".to_string(),
5533 is_disk_based: true,
5534 group_id: 1,
5535 is_primary: true,
5536 ..Default::default()
5537 },
5538 },
5539 DiagnosticEntry {
5540 range: Point::new(4, 9)..Point::new(4, 12),
5541 diagnostic: Diagnostic {
5542 severity: DiagnosticSeverity::ERROR,
5543 message: "undefined variable 'CCC'".to_string(),
5544 is_disk_based: true,
5545 group_id: 2,
5546 is_primary: true,
5547 ..Default::default()
5548 }
5549 }
5550 ]
5551 );
5552 assert_eq!(
5553 chunks_with_diagnostics(buffer, 0..buffer.len()),
5554 [
5555 ("\n\nfn a() { ".to_string(), None),
5556 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5557 (" }\nfn b() { ".to_string(), None),
5558 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5559 (" }\nfn c() { ".to_string(), None),
5560 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5561 (" }\n".to_string(), None),
5562 ]
5563 );
5564 assert_eq!(
5565 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5566 [
5567 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5568 (" }\nfn c() { ".to_string(), None),
5569 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5570 ]
5571 );
5572 });
5573
5574 // Ensure overlapping diagnostics are highlighted correctly.
5575 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5576 lsp::PublishDiagnosticsParams {
5577 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5578 version: Some(open_notification.text_document.version),
5579 diagnostics: vec![
5580 lsp::Diagnostic {
5581 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5582 severity: Some(DiagnosticSeverity::ERROR),
5583 message: "undefined variable 'A'".to_string(),
5584 source: Some("disk".to_string()),
5585 ..Default::default()
5586 },
5587 lsp::Diagnostic {
5588 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5589 severity: Some(DiagnosticSeverity::WARNING),
5590 message: "unreachable statement".to_string(),
5591 source: Some("disk".to_string()),
5592 ..Default::default()
5593 },
5594 ],
5595 },
5596 );
5597
5598 buffer.next_notification(cx).await;
5599 buffer.read_with(cx, |buffer, _| {
5600 assert_eq!(
5601 buffer
5602 .snapshot()
5603 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5604 .collect::<Vec<_>>(),
5605 &[
5606 DiagnosticEntry {
5607 range: Point::new(2, 9)..Point::new(2, 12),
5608 diagnostic: Diagnostic {
5609 severity: DiagnosticSeverity::WARNING,
5610 message: "unreachable statement".to_string(),
5611 is_disk_based: true,
5612 group_id: 1,
5613 is_primary: true,
5614 ..Default::default()
5615 }
5616 },
5617 DiagnosticEntry {
5618 range: Point::new(2, 9)..Point::new(2, 10),
5619 diagnostic: Diagnostic {
5620 severity: DiagnosticSeverity::ERROR,
5621 message: "undefined variable 'A'".to_string(),
5622 is_disk_based: true,
5623 group_id: 0,
5624 is_primary: true,
5625 ..Default::default()
5626 },
5627 }
5628 ]
5629 );
5630 assert_eq!(
5631 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5632 [
5633 ("fn a() { ".to_string(), None),
5634 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5635 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5636 ("\n".to_string(), None),
5637 ]
5638 );
5639 assert_eq!(
5640 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5641 [
5642 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5643 ("\n".to_string(), None),
5644 ]
5645 );
5646 });
5647
5648 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5649 // changes since the last save.
5650 buffer.update(cx, |buffer, cx| {
5651 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5652 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5653 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5654 });
5655 let change_notification_2 = fake_server
5656 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5657 .await;
5658 assert!(
5659 change_notification_2.text_document.version
5660 > change_notification_1.text_document.version
5661 );
5662
5663 // Handle out-of-order diagnostics
5664 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5665 lsp::PublishDiagnosticsParams {
5666 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5667 version: Some(change_notification_2.text_document.version),
5668 diagnostics: vec![
5669 lsp::Diagnostic {
5670 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5671 severity: Some(DiagnosticSeverity::ERROR),
5672 message: "undefined variable 'BB'".to_string(),
5673 source: Some("disk".to_string()),
5674 ..Default::default()
5675 },
5676 lsp::Diagnostic {
5677 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5678 severity: Some(DiagnosticSeverity::WARNING),
5679 message: "undefined variable 'A'".to_string(),
5680 source: Some("disk".to_string()),
5681 ..Default::default()
5682 },
5683 ],
5684 },
5685 );
5686
5687 buffer.next_notification(cx).await;
5688 buffer.read_with(cx, |buffer, _| {
5689 assert_eq!(
5690 buffer
5691 .snapshot()
5692 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5693 .collect::<Vec<_>>(),
5694 &[
5695 DiagnosticEntry {
5696 range: Point::new(2, 21)..Point::new(2, 22),
5697 diagnostic: Diagnostic {
5698 severity: DiagnosticSeverity::WARNING,
5699 message: "undefined variable 'A'".to_string(),
5700 is_disk_based: true,
5701 group_id: 1,
5702 is_primary: true,
5703 ..Default::default()
5704 }
5705 },
5706 DiagnosticEntry {
5707 range: Point::new(3, 9)..Point::new(3, 14),
5708 diagnostic: Diagnostic {
5709 severity: DiagnosticSeverity::ERROR,
5710 message: "undefined variable 'BB'".to_string(),
5711 is_disk_based: true,
5712 group_id: 0,
5713 is_primary: true,
5714 ..Default::default()
5715 },
5716 }
5717 ]
5718 );
5719 });
5720 }
5721
5722 #[gpui::test]
5723 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5724 cx.foreground().forbid_parking();
5725
5726 let text = concat!(
5727 "let one = ;\n", //
5728 "let two = \n",
5729 "let three = 3;\n",
5730 );
5731
5732 let fs = FakeFs::new(cx.background());
5733 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5734
5735 let project = Project::test(fs, cx);
5736 let worktree_id = project
5737 .update(cx, |project, cx| {
5738 project.find_or_create_local_worktree("/dir", true, cx)
5739 })
5740 .await
5741 .unwrap()
5742 .0
5743 .read_with(cx, |tree, _| tree.id());
5744
5745 let buffer = project
5746 .update(cx, |project, cx| {
5747 project.open_buffer((worktree_id, "a.rs"), cx)
5748 })
5749 .await
5750 .unwrap();
5751
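    // Manually add two error diagnostics whose ranges are empty.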
5752 project.update(cx, |project, cx| {
5753 project
5754 .update_buffer_diagnostics(
5755 &buffer,
5756 vec![
5757 DiagnosticEntry {
5758 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5759 diagnostic: Diagnostic {
5760 severity: DiagnosticSeverity::ERROR,
5761 message: "syntax error 1".to_string(),
5762 ..Default::default()
5763 },
5764 },
5765 DiagnosticEntry {
5766 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5767 diagnostic: Diagnostic {
5768 severity: DiagnosticSeverity::ERROR,
5769 message: "syntax error 2".to_string(),
5770 ..Default::default()
5771 },
5772 },
5773 ],
5774 None,
5775 cx,
5776 )
5777 .unwrap();
5778 });
5779
5780 // An empty range is extended forward to include the following character.
5781 // At the end of a line, an empty range is extended backward to include
5782 // the preceding character.
5783 buffer.read_with(cx, |buffer, _| {
5784 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5785 assert_eq!(
5786 chunks
5787 .iter()
5788 .map(|(s, d)| (s.as_str(), *d))
5789 .collect::<Vec<_>>(),
5790 &[
5791 ("let one = ", None),
5792 (";", Some(DiagnosticSeverity::ERROR)),
5793 ("\nlet two =", None),
5794 (" ", Some(DiagnosticSeverity::ERROR)),
5795 ("\nlet three = 3;\n", None)
5796 ]
5797 );
5798 });
5799 }
5800
5801 #[gpui::test]
5802 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5803 cx.foreground().forbid_parking();
5804
5805 let mut language = Language::new(
5806 LanguageConfig {
5807 name: "Rust".into(),
5808 path_suffixes: vec!["rs".to_string()],
5809 ..Default::default()
5810 },
5811 Some(tree_sitter_rust::language()),
5812 );
5813 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5814
5815 let text = "
5816 fn a() {
5817 f1();
5818 }
5819 fn b() {
5820 f2();
5821 }
5822 fn c() {
5823 f3();
5824 }
5825 "
5826 .unindent();
5827
5828 let fs = FakeFs::new(cx.background());
5829 fs.insert_tree(
5830 "/dir",
5831 json!({
5832 "a.rs": text.clone(),
5833 }),
5834 )
5835 .await;
5836
5837 let project = Project::test(fs, cx);
5838 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5839
5840 let worktree_id = project
5841 .update(cx, |project, cx| {
5842 project.find_or_create_local_worktree("/dir", true, cx)
5843 })
5844 .await
5845 .unwrap()
5846 .0
5847 .read_with(cx, |tree, _| tree.id());
5848
5849 let buffer = project
5850 .update(cx, |project, cx| {
5851 project.open_buffer((worktree_id, "a.rs"), cx)
5852 })
5853 .await
5854 .unwrap();
5855
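    // Capture the document version the server saw in the initial `didOpen` notification.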
5856 let mut fake_server = fake_servers.next().await.unwrap();
5857 let lsp_document_version = fake_server
5858 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5859 .await
5860 .text_document
5861 .version;
5862
5863 // Simulate editing the buffer after the language server computes some edits.
5864 buffer.update(cx, |buffer, cx| {
5865 buffer.edit(
5866 [Point::new(0, 0)..Point::new(0, 0)],
5867 "// above first function\n",
5868 cx,
5869 );
5870 buffer.edit(
5871 [Point::new(2, 0)..Point::new(2, 0)],
5872 " // inside first function\n",
5873 cx,
5874 );
5875 buffer.edit(
5876 [Point::new(6, 4)..Point::new(6, 4)],
5877 "// inside second function ",
5878 cx,
5879 );
5880
5881 assert_eq!(
5882 buffer.text(),
5883 "
5884 // above first function
5885 fn a() {
5886 // inside first function
5887 f1();
5888 }
5889 fn b() {
5890 // inside second function f2();
5891 }
5892 fn c() {
5893 f3();
5894 }
5895 "
5896 .unindent()
5897 );
5898 });
5899
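    // Interpret the LSP edits, which were computed against the earlier document version.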
5900 let edits = project
5901 .update(cx, |project, cx| {
5902 project.edits_from_lsp(
5903 &buffer,
5904 vec![
5905 // replace body of first function
5906 lsp::TextEdit {
5907 range: lsp::Range::new(
5908 lsp::Position::new(0, 0),
5909 lsp::Position::new(3, 0),
5910 ),
5911 new_text: "
5912 fn a() {
5913 f10();
5914 }
5915 "
5916 .unindent(),
5917 },
5918 // edit inside second function
5919 lsp::TextEdit {
5920 range: lsp::Range::new(
5921 lsp::Position::new(4, 6),
5922 lsp::Position::new(4, 6),
5923 ),
5924 new_text: "00".into(),
5925 },
5926 // edit inside third function via two distinct edits
5927 lsp::TextEdit {
5928 range: lsp::Range::new(
5929 lsp::Position::new(7, 5),
5930 lsp::Position::new(7, 5),
5931 ),
5932 new_text: "4000".into(),
5933 },
5934 lsp::TextEdit {
5935 range: lsp::Range::new(
5936 lsp::Position::new(7, 5),
5937 lsp::Position::new(7, 6),
5938 ),
5939 new_text: "".into(),
5940 },
5941 ],
5942 Some(lsp_document_version),
5943 cx,
5944 )
5945 })
5946 .await
5947 .unwrap();
5948
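    // Apply the interpreted edits and verify that they land in the intended locations despite the intervening buffer edits.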
5949 buffer.update(cx, |buffer, cx| {
5950 for (range, new_text) in edits {
5951 buffer.edit([range], new_text, cx);
5952 }
5953 assert_eq!(
5954 buffer.text(),
5955 "
5956 // above first function
5957 fn a() {
5958 // inside first function
5959 f10();
5960 }
5961 fn b() {
5962 // inside second function f200();
5963 }
5964 fn c() {
5965 f4000();
5966 }
5967 "
5968 .unindent()
5969 );
5970 });
5971 }
5972
5973 #[gpui::test]
5974 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5975 cx.foreground().forbid_parking();
5976
5977 let text = "
5978 use a::b;
5979 use a::c;
5980
5981 fn f() {
5982 b();
5983 c();
5984 }
5985 "
5986 .unindent();
5987
5988 let fs = FakeFs::new(cx.background());
5989 fs.insert_tree(
5990 "/dir",
5991 json!({
5992 "a.rs": text.clone(),
5993 }),
5994 )
5995 .await;
5996
5997 let project = Project::test(fs, cx);
5998 let worktree_id = project
5999 .update(cx, |project, cx| {
6000 project.find_or_create_local_worktree("/dir", true, cx)
6001 })
6002 .await
6003 .unwrap()
6004 .0
6005 .read_with(cx, |tree, _| tree.id());
6006
6007 let buffer = project
6008 .update(cx, |project, cx| {
6009 project.open_buffer((worktree_id, "a.rs"), cx)
6010 })
6011 .await
6012 .unwrap();
6013
6014 // Simulate the language server sending us a small edit in the form of a very large diff.
6015 // Rust-analyzer does this when performing a merge-imports code action.
6016 let edits = project
6017 .update(cx, |project, cx| {
6018 project.edits_from_lsp(
6019 &buffer,
6020 [
6021 // Replace the first use statement without editing the semicolon.
6022 lsp::TextEdit {
6023 range: lsp::Range::new(
6024 lsp::Position::new(0, 4),
6025 lsp::Position::new(0, 8),
6026 ),
6027 new_text: "a::{b, c}".into(),
6028 },
6029 // Reinsert the remainder of the file between the semicolon and the final
6030 // newline of the file.
6031 lsp::TextEdit {
6032 range: lsp::Range::new(
6033 lsp::Position::new(0, 9),
6034 lsp::Position::new(0, 9),
6035 ),
6036 new_text: "\n\n".into(),
6037 },
6038 lsp::TextEdit {
6039 range: lsp::Range::new(
6040 lsp::Position::new(0, 9),
6041 lsp::Position::new(0, 9),
6042 ),
6043 new_text: "
6044 fn f() {
6045 b();
6046 c();
6047 }"
6048 .unindent(),
6049 },
6050 // Delete everything after the first newline of the file.
6051 lsp::TextEdit {
6052 range: lsp::Range::new(
6053 lsp::Position::new(1, 0),
6054 lsp::Position::new(7, 0),
6055 ),
6056 new_text: "".into(),
6057 },
6058 ],
6059 None,
6060 cx,
6061 )
6062 })
6063 .await
6064 .unwrap();
6065
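    // The four LSP edits should be reduced to two minimal buffer edits.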
6066 buffer.update(cx, |buffer, cx| {
6067 let edits = edits
6068 .into_iter()
6069 .map(|(range, text)| {
6070 (
6071 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6072 text,
6073 )
6074 })
6075 .collect::<Vec<_>>();
6076
6077 assert_eq!(
6078 edits,
6079 [
6080 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6081 (Point::new(1, 0)..Point::new(2, 0), "".into())
6082 ]
6083 );
6084
6085 for (range, new_text) in edits {
6086 buffer.edit([range], new_text, cx);
6087 }
6088 assert_eq!(
6089 buffer.text(),
6090 "
6091 use a::{b, c};
6092
6093 fn f() {
6094 b();
6095 c();
6096 }
6097 "
6098 .unindent()
6099 );
6100 });
6101 }
6102
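    // Returns the text of `range` split into chunks, merging adjacent chunks that have the same diagnostic severity.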
6103 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6104 buffer: &Buffer,
6105 range: Range<T>,
6106 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6107 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6108 for chunk in buffer.snapshot().chunks(range, true) {
6109 if chunks.last().map_or(false, |prev_chunk| {
6110 prev_chunk.1 == chunk.diagnostic_severity
6111 }) {
6112 chunks.last_mut().unwrap().0.push_str(chunk.text);
6113 } else {
6114 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6115 }
6116 }
6117 chunks
6118 }
6119
6120 #[gpui::test]
6121 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6122 let dir = temp_tree(json!({
6123 "root": {
6124 "dir1": {},
6125 "dir2": {
6126 "dir3": {}
6127 }
6128 }
6129 }));
6130
6131 let project = Project::test(Arc::new(RealFs), cx);
6132 let (tree, _) = project
6133 .update(cx, |project, cx| {
6134 project.find_or_create_local_worktree(&dir.path(), true, cx)
6135 })
6136 .await
6137 .unwrap();
6138
6139 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6140 .await;
6141
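        // Fuzzy-matching "dir" should yield no results, since the worktree contains only directories.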
6142 let cancel_flag = Default::default();
6143 let results = project
6144 .read_with(cx, |project, cx| {
6145 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6146 })
6147 .await;
6148
6149 assert!(results.is_empty());
6150 }
6151
6152 #[gpui::test]
6153 async fn test_definition(cx: &mut gpui::TestAppContext) {
6154 let mut language = Language::new(
6155 LanguageConfig {
6156 name: "Rust".into(),
6157 path_suffixes: vec!["rs".to_string()],
6158 ..Default::default()
6159 },
6160 Some(tree_sitter_rust::language()),
6161 );
6162 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6163
6164 let fs = FakeFs::new(cx.background());
6165 fs.insert_tree(
6166 "/dir",
6167 json!({
6168 "a.rs": "const fn a() { A }",
6169 "b.rs": "const y: i32 = crate::a()",
6170 }),
6171 )
6172 .await;
6173
6174 let project = Project::test(fs, cx);
6175 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6176
6177 let (tree, _) = project
6178 .update(cx, |project, cx| {
6179 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6180 })
6181 .await
6182 .unwrap();
6183 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6184 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6185 .await;
6186
6187 let buffer = project
6188 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6189 .await
6190 .unwrap();
6191
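        // Respond to the definition request with a location in a file that isn't part of the project's worktrees.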
6192 let fake_server = fake_servers.next().await.unwrap();
6193 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6194 let params = params.text_document_position_params;
6195 assert_eq!(
6196 params.text_document.uri.to_file_path().unwrap(),
6197 Path::new("/dir/b.rs"),
6198 );
6199 assert_eq!(params.position, lsp::Position::new(0, 22));
6200
6201 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6202 lsp::Location::new(
6203 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6204 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6205 ),
6206 )))
6207 });
6208
6209 let mut definitions = project
6210 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6211 .await
6212 .unwrap();
6213
6214 assert_eq!(definitions.len(), 1);
6215 let definition = definitions.pop().unwrap();
6216 cx.update(|cx| {
6217 let target_buffer = definition.buffer.read(cx);
6218 assert_eq!(
6219 target_buffer
6220 .file()
6221 .unwrap()
6222 .as_local()
6223 .unwrap()
6224 .abs_path(cx),
6225 Path::new("/dir/a.rs"),
6226 );
6227 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6228 assert_eq!(
6229 list_worktrees(&project, cx),
6230 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6231 );
6232
6233 drop(definition);
6234 });
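        // Dropping the definition releases the invisible worktree that was added to hold its target buffer.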
6235 cx.read(|cx| {
6236 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6237 });
6238
6239 fn list_worktrees<'a>(
6240 project: &'a ModelHandle<Project>,
6241 cx: &'a AppContext,
6242 ) -> Vec<(&'a Path, bool)> {
6243 project
6244 .read(cx)
6245 .worktrees(cx)
6246 .map(|worktree| {
6247 let worktree = worktree.read(cx);
6248 (
6249 worktree.as_local().unwrap().abs_path().as_ref(),
6250 worktree.is_visible(),
6251 )
6252 })
6253 .collect::<Vec<_>>()
6254 }
6255 }
6256
6257 #[gpui::test(iterations = 10)]
6258 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6259 let mut language = Language::new(
6260 LanguageConfig {
6261 name: "TypeScript".into(),
6262 path_suffixes: vec!["ts".to_string()],
6263 ..Default::default()
6264 },
6265 None,
6266 );
6267 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6268
6269 let fs = FakeFs::new(cx.background());
6270 fs.insert_tree(
6271 "/dir",
6272 json!({
6273 "a.ts": "a",
6274 }),
6275 )
6276 .await;
6277
6278 let project = Project::test(fs, cx);
6279 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6280
6281 let (tree, _) = project
6282 .update(cx, |project, cx| {
6283 project.find_or_create_local_worktree("/dir", true, cx)
6284 })
6285 .await
6286 .unwrap();
6287 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6288 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6289 .await;
6290
6291 let buffer = project
6292 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6293 .await
6294 .unwrap();
6295
6296 let fake_server = fake_language_servers.next().await.unwrap();
6297
        // The language server returns code actions that contain commands, but no edits.
6299 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6300 fake_server
6301 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6302 Ok(Some(vec![
6303 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6304 title: "The code action".into(),
6305 command: Some(lsp::Command {
6306 title: "The command".into(),
6307 command: "_the/command".into(),
6308 arguments: Some(vec![json!("the-argument")]),
6309 }),
6310 ..Default::default()
6311 }),
6312 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6313 title: "two".into(),
6314 ..Default::default()
6315 }),
6316 ]))
6317 })
6318 .next()
6319 .await;
6320
6321 let action = actions.await.unwrap()[0].clone();
6322 let apply = project.update(cx, |project, cx| {
6323 project.apply_code_action(buffer.clone(), action, true, cx)
6324 });
6325
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
6328 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6329 |action, _| async move { Ok(action) },
6330 );
6331
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6334 fake_server
6335 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6336 let fake = fake_server.clone();
6337 move |params, _| {
6338 assert_eq!(params.command, "_the/command");
6339 let fake = fake.clone();
6340 async move {
6341 fake.server
6342 .request::<lsp::request::ApplyWorkspaceEdit>(
6343 lsp::ApplyWorkspaceEditParams {
6344 label: None,
6345 edit: lsp::WorkspaceEdit {
6346 changes: Some(
6347 [(
6348 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6349 vec![lsp::TextEdit {
6350 range: lsp::Range::new(
6351 lsp::Position::new(0, 0),
6352 lsp::Position::new(0, 0),
6353 ),
6354 new_text: "X".into(),
6355 }],
6356 )]
6357 .into_iter()
6358 .collect(),
6359 ),
6360 ..Default::default()
6361 },
6362 },
6363 )
6364 .await
6365 .unwrap();
6366 Ok(Some(json!(null)))
6367 }
6368 }
6369 })
6370 .next()
6371 .await;
6372
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6375 let transaction = apply.await.unwrap();
6376 assert!(transaction.0.contains_key(&buffer));
6377 buffer.update(cx, |buffer, cx| {
6378 assert_eq!(buffer.text(), "Xa");
6379 buffer.undo(cx);
6380 assert_eq!(buffer.text(), "a");
6381 });
6382 }
6383
6384 #[gpui::test]
6385 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6386 let fs = FakeFs::new(cx.background());
6387 fs.insert_tree(
6388 "/dir",
6389 json!({
6390 "file1": "the old contents",
6391 }),
6392 )
6393 .await;
6394
6395 let project = Project::test(fs.clone(), cx);
6396 let worktree_id = project
6397 .update(cx, |p, cx| {
6398 p.find_or_create_local_worktree("/dir", true, cx)
6399 })
6400 .await
6401 .unwrap()
6402 .0
6403 .read_with(cx, |tree, _| tree.id());
6404
6405 let buffer = project
6406 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6407 .await
6408 .unwrap();
6409 buffer
6410 .update(cx, |buffer, cx| {
6411 assert_eq!(buffer.text(), "the old contents");
6412 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6413 buffer.save(cx)
6414 })
6415 .await
6416 .unwrap();
6417
6418 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6419 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6420 }
6421
6422 #[gpui::test]
6423 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6424 let fs = FakeFs::new(cx.background());
6425 fs.insert_tree(
6426 "/dir",
6427 json!({
6428 "file1": "the old contents",
6429 }),
6430 )
6431 .await;
6432
6433 let project = Project::test(fs.clone(), cx);
6434 let worktree_id = project
6435 .update(cx, |p, cx| {
6436 p.find_or_create_local_worktree("/dir/file1", true, cx)
6437 })
6438 .await
6439 .unwrap()
6440 .0
6441 .read_with(cx, |tree, _| tree.id());
6442
6443 let buffer = project
6444 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6445 .await
6446 .unwrap();
6447 buffer
6448 .update(cx, |buffer, cx| {
6449 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6450 buffer.save(cx)
6451 })
6452 .await
6453 .unwrap();
6454
6455 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6456 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6457 }
6458
6459 #[gpui::test]
6460 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6461 let fs = FakeFs::new(cx.background());
6462 fs.insert_tree("/dir", json!({})).await;
6463
6464 let project = Project::test(fs.clone(), cx);
6465 let (worktree, _) = project
6466 .update(cx, |project, cx| {
6467 project.find_or_create_local_worktree("/dir", true, cx)
6468 })
6469 .await
6470 .unwrap();
6471 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6472
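        // Create an untitled buffer with no associated file.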
6473 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6474 buffer.update(cx, |buffer, cx| {
6475 buffer.edit([0..0], "abc", cx);
6476 assert!(buffer.is_dirty());
6477 assert!(!buffer.has_conflict());
6478 });
6479 project
6480 .update(cx, |project, cx| {
6481 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6482 })
6483 .await
6484 .unwrap();
6485 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6486 buffer.read_with(cx, |buffer, cx| {
6487 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6488 assert!(!buffer.is_dirty());
6489 assert!(!buffer.has_conflict());
6490 });
6491
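        // Opening the path the buffer was saved to returns the same buffer.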
6492 let opened_buffer = project
6493 .update(cx, |project, cx| {
6494 project.open_buffer((worktree_id, "file1"), cx)
6495 })
6496 .await
6497 .unwrap();
6498 assert_eq!(opened_buffer, buffer);
6499 }
6500
6501 #[gpui::test(retries = 5)]
6502 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6503 let dir = temp_tree(json!({
6504 "a": {
6505 "file1": "",
6506 "file2": "",
6507 "file3": "",
6508 },
6509 "b": {
6510 "c": {
6511 "file4": "",
6512 "file5": "",
6513 }
6514 }
6515 }));
6516
6517 let project = Project::test(Arc::new(RealFs), cx);
6518 let rpc = project.read_with(cx, |p, _| p.client.clone());
6519
6520 let (tree, _) = project
6521 .update(cx, |p, cx| {
6522 p.find_or_create_local_worktree(dir.path(), true, cx)
6523 })
6524 .await
6525 .unwrap();
6526 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6527
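        // Helpers for opening a buffer at a path and for looking up a path's entry id.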
6528 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6529 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6530 async move { buffer.await.unwrap() }
6531 };
6532 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6533 tree.read_with(cx, |tree, _| {
6534 tree.entry_for_path(path)
6535 .expect(&format!("no entry for path {}", path))
6536 .id
6537 })
6538 };
6539
6540 let buffer2 = buffer_for_path("a/file2", cx).await;
6541 let buffer3 = buffer_for_path("a/file3", cx).await;
6542 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6543 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6544
6545 let file2_id = id_for_path("a/file2", &cx);
6546 let file3_id = id_for_path("a/file3", &cx);
6547 let file4_id = id_for_path("b/c/file4", &cx);
6548
6549 // Wait for the initial scan.
6550 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6551 .await;
6552
6553 // Create a remote copy of this worktree.
6554 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6555 let (remote, load_task) = cx.update(|cx| {
6556 Worktree::remote(
6557 1,
6558 1,
6559 initial_snapshot.to_proto(&Default::default(), true),
6560 rpc.clone(),
6561 cx,
6562 )
6563 });
6564 load_task.await;
6565
6566 cx.read(|cx| {
6567 assert!(!buffer2.read(cx).is_dirty());
6568 assert!(!buffer3.read(cx).is_dirty());
6569 assert!(!buffer4.read(cx).is_dirty());
6570 assert!(!buffer5.read(cx).is_dirty());
6571 });
6572
6573 // Rename and delete files and directories.
6574 tree.flush_fs_events(&cx).await;
6575 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6576 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6577 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6578 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6579 tree.flush_fs_events(&cx).await;
6580
6581 let expected_paths = vec![
6582 "a",
6583 "a/file1",
6584 "a/file2.new",
6585 "b",
6586 "d",
6587 "d/file3",
6588 "d/file4",
6589 ];
6590
6591 cx.read(|app| {
6592 assert_eq!(
6593 tree.read(app)
6594 .paths()
6595 .map(|p| p.to_str().unwrap())
6596 .collect::<Vec<_>>(),
6597 expected_paths
6598 );
6599
6600 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6601 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6602 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6603
6604 assert_eq!(
6605 buffer2.read(app).file().unwrap().path().as_ref(),
6606 Path::new("a/file2.new")
6607 );
6608 assert_eq!(
6609 buffer3.read(app).file().unwrap().path().as_ref(),
6610 Path::new("d/file3")
6611 );
6612 assert_eq!(
6613 buffer4.read(app).file().unwrap().path().as_ref(),
6614 Path::new("d/file4")
6615 );
6616 assert_eq!(
6617 buffer5.read(app).file().unwrap().path().as_ref(),
6618 Path::new("b/c/file5")
6619 );
6620
6621 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6622 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6623 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6624 assert!(buffer5.read(app).file().unwrap().is_deleted());
6625 });
6626
6627 // Update the remote worktree. Check that it becomes consistent with the
6628 // local worktree.
6629 remote.update(cx, |remote, cx| {
6630 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6631 &initial_snapshot,
6632 1,
6633 1,
6634 true,
6635 );
6636 remote
6637 .as_remote_mut()
6638 .unwrap()
6639 .snapshot
6640 .apply_remote_update(update_message)
6641 .unwrap();
6642
6643 assert_eq!(
6644 remote
6645 .paths()
6646 .map(|p| p.to_str().unwrap())
6647 .collect::<Vec<_>>(),
6648 expected_paths
6649 );
6650 });
6651 }
6652
6653 #[gpui::test]
6654 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6655 let fs = FakeFs::new(cx.background());
6656 fs.insert_tree(
6657 "/the-dir",
6658 json!({
6659 "a.txt": "a-contents",
6660 "b.txt": "b-contents",
6661 }),
6662 )
6663 .await;
6664
6665 let project = Project::test(fs.clone(), cx);
6666 let worktree_id = project
6667 .update(cx, |p, cx| {
6668 p.find_or_create_local_worktree("/the-dir", true, cx)
6669 })
6670 .await
6671 .unwrap()
6672 .0
6673 .read_with(cx, |tree, _| tree.id());
6674
6675 // Spawn multiple tasks to open paths, repeating some paths.
6676 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6677 (
6678 p.open_buffer((worktree_id, "a.txt"), cx),
6679 p.open_buffer((worktree_id, "b.txt"), cx),
6680 p.open_buffer((worktree_id, "a.txt"), cx),
6681 )
6682 });
6683
6684 let buffer_a_1 = buffer_a_1.await.unwrap();
6685 let buffer_a_2 = buffer_a_2.await.unwrap();
6686 let buffer_b = buffer_b.await.unwrap();
6687 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6688 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6689
6690 // There is only one buffer per path.
6691 let buffer_a_id = buffer_a_1.id();
6692 assert_eq!(buffer_a_2.id(), buffer_a_id);
6693
6694 // Open the same path again while it is still open.
6695 drop(buffer_a_1);
6696 let buffer_a_3 = project
6697 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6698 .await
6699 .unwrap();
6700
6701 // There's still only one buffer per path.
6702 assert_eq!(buffer_a_3.id(), buffer_a_id);
6703 }
6704
6705 #[gpui::test]
6706 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6707 use std::fs;
6708
6709 let dir = temp_tree(json!({
6710 "file1": "abc",
6711 "file2": "def",
6712 "file3": "ghi",
6713 }));
6714
6715 let project = Project::test(Arc::new(RealFs), cx);
6716 let (worktree, _) = project
6717 .update(cx, |p, cx| {
6718 p.find_or_create_local_worktree(dir.path(), true, cx)
6719 })
6720 .await
6721 .unwrap();
6722 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6723
6724 worktree.flush_fs_events(&cx).await;
6725 worktree
6726 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6727 .await;
6728
6729 let buffer1 = project
6730 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6731 .await
6732 .unwrap();
6733 let events = Rc::new(RefCell::new(Vec::new()));
6734
6735 // initially, the buffer isn't dirty.
6736 buffer1.update(cx, |buffer, cx| {
6737 cx.subscribe(&buffer1, {
6738 let events = events.clone();
6739 move |_, _, event, _| match event {
6740 BufferEvent::Operation(_) => {}
6741 _ => events.borrow_mut().push(event.clone()),
6742 }
6743 })
6744 .detach();
6745
6746 assert!(!buffer.is_dirty());
6747 assert!(events.borrow().is_empty());
6748
6749 buffer.edit(vec![1..2], "", cx);
6750 });
6751
6752 // after the first edit, the buffer is dirty, and emits a dirtied event.
6753 buffer1.update(cx, |buffer, cx| {
6754 assert!(buffer.text() == "ac");
6755 assert!(buffer.is_dirty());
6756 assert_eq!(
6757 *events.borrow(),
6758 &[language::Event::Edited, language::Event::Dirtied]
6759 );
6760 events.borrow_mut().clear();
6761 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6762 });
6763
6764 // after saving, the buffer is not dirty, and emits a saved event.
6765 buffer1.update(cx, |buffer, cx| {
6766 assert!(!buffer.is_dirty());
6767 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6768 events.borrow_mut().clear();
6769
6770 buffer.edit(vec![1..1], "B", cx);
6771 buffer.edit(vec![2..2], "D", cx);
6772 });
6773
6774 // after editing again, the buffer is dirty, and emits another dirty event.
6775 buffer1.update(cx, |buffer, cx| {
6776 assert!(buffer.text() == "aBDc");
6777 assert!(buffer.is_dirty());
6778 assert_eq!(
6779 *events.borrow(),
6780 &[
6781 language::Event::Edited,
6782 language::Event::Dirtied,
6783 language::Event::Edited,
6784 ],
6785 );
6786 events.borrow_mut().clear();
6787
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
6790 buffer.edit([1..3], "", cx);
6791 assert!(buffer.text() == "ac");
6792 assert!(buffer.is_dirty());
6793 });
6794
6795 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6796
6797 // When a file is deleted, the buffer is considered dirty.
6798 let events = Rc::new(RefCell::new(Vec::new()));
6799 let buffer2 = project
6800 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6801 .await
6802 .unwrap();
6803 buffer2.update(cx, |_, cx| {
6804 cx.subscribe(&buffer2, {
6805 let events = events.clone();
6806 move |_, _, event, _| events.borrow_mut().push(event.clone())
6807 })
6808 .detach();
6809 });
6810
6811 fs::remove_file(dir.path().join("file2")).unwrap();
6812 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6813 assert_eq!(
6814 *events.borrow(),
6815 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6816 );
6817
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6819 let events = Rc::new(RefCell::new(Vec::new()));
6820 let buffer3 = project
6821 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6822 .await
6823 .unwrap();
6824 buffer3.update(cx, |_, cx| {
6825 cx.subscribe(&buffer3, {
6826 let events = events.clone();
6827 move |_, _, event, _| events.borrow_mut().push(event.clone())
6828 })
6829 .detach();
6830 });
6831
6832 worktree.flush_fs_events(&cx).await;
6833 buffer3.update(cx, |buffer, cx| {
6834 buffer.edit(Some(0..0), "x", cx);
6835 });
6836 events.borrow_mut().clear();
6837 fs::remove_file(dir.path().join("file3")).unwrap();
6838 buffer3
6839 .condition(&cx, |_, _| !events.borrow().is_empty())
6840 .await;
6841 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6842 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6843 }
6844
6845 #[gpui::test]
6846 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6847 use std::fs;
6848
6849 let initial_contents = "aaa\nbbbbb\nc\n";
6850 let dir = temp_tree(json!({ "the-file": initial_contents }));
6851
6852 let project = Project::test(Arc::new(RealFs), cx);
6853 let (worktree, _) = project
6854 .update(cx, |p, cx| {
6855 p.find_or_create_local_worktree(dir.path(), true, cx)
6856 })
6857 .await
6858 .unwrap();
6859 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6860
6861 worktree
6862 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6863 .await;
6864
6865 let abs_path = dir.path().join("the-file");
6866 let buffer = project
6867 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6868 .await
6869 .unwrap();
6870
6871 // TODO
6872 // Add a cursor on each row.
6873 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6874 // assert!(!buffer.is_dirty());
6875 // buffer.add_selection_set(
6876 // &(0..3)
6877 // .map(|row| Selection {
6878 // id: row as usize,
6879 // start: Point::new(row, 1),
6880 // end: Point::new(row, 1),
6881 // reversed: false,
6882 // goal: SelectionGoal::None,
6883 // })
6884 // .collect::<Vec<_>>(),
6885 // cx,
6886 // )
6887 // });
6888
6889 // Change the file on disk, adding two new lines of text, and removing
6890 // one line.
6891 buffer.read_with(cx, |buffer, _| {
6892 assert!(!buffer.is_dirty());
6893 assert!(!buffer.has_conflict());
6894 });
6895 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6896 fs::write(&abs_path, new_contents).unwrap();
6897
6898 // Because the buffer was not modified, it is reloaded from disk. Its
6899 // contents are edited according to the diff between the old and new
6900 // file contents.
6901 buffer
6902 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6903 .await;
6904
6905 buffer.update(cx, |buffer, _| {
6906 assert_eq!(buffer.text(), new_contents);
6907 assert!(!buffer.is_dirty());
6908 assert!(!buffer.has_conflict());
6909
6910 // TODO
6911 // let cursor_positions = buffer
6912 // .selection_set(selection_set_id)
6913 // .unwrap()
6914 // .selections::<Point>(&*buffer)
6915 // .map(|selection| {
6916 // assert_eq!(selection.start, selection.end);
6917 // selection.start
6918 // })
6919 // .collect::<Vec<_>>();
6920 // assert_eq!(
6921 // cursor_positions,
6922 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6923 // );
6924 });
6925
6926 // Modify the buffer
6927 buffer.update(cx, |buffer, cx| {
6928 buffer.edit(vec![0..0], " ", cx);
6929 assert!(buffer.is_dirty());
6930 assert!(!buffer.has_conflict());
6931 });
6932
6933 // Change the file on disk again, adding blank lines to the beginning.
6934 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6935
6936 // Because the buffer is modified, it doesn't reload from disk, but is
6937 // marked as having a conflict.
6938 buffer
6939 .condition(&cx, |buffer, _| buffer.has_conflict())
6940 .await;
6941 }
6942
6943 #[gpui::test]
6944 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6945 cx.foreground().forbid_parking();
6946
6947 let fs = FakeFs::new(cx.background());
6948 fs.insert_tree(
6949 "/the-dir",
6950 json!({
6951 "a.rs": "
6952 fn foo(mut v: Vec<usize>) {
6953 for x in &v {
6954 v.push(1);
6955 }
6956 }
6957 "
6958 .unindent(),
6959 }),
6960 )
6961 .await;
6962
6963 let project = Project::test(fs.clone(), cx);
6964 let (worktree, _) = project
6965 .update(cx, |p, cx| {
6966 p.find_or_create_local_worktree("/the-dir", true, cx)
6967 })
6968 .await
6969 .unwrap();
6970 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6971
6972 let buffer = project
6973 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6974 .await
6975 .unwrap();
6976
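        // Publish diagnostics whose hints reference their primary diagnostics via related information, forming two groups.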
6977 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6978 let message = lsp::PublishDiagnosticsParams {
6979 uri: buffer_uri.clone(),
6980 diagnostics: vec![
6981 lsp::Diagnostic {
6982 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6983 severity: Some(DiagnosticSeverity::WARNING),
6984 message: "error 1".to_string(),
6985 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6986 location: lsp::Location {
6987 uri: buffer_uri.clone(),
6988 range: lsp::Range::new(
6989 lsp::Position::new(1, 8),
6990 lsp::Position::new(1, 9),
6991 ),
6992 },
6993 message: "error 1 hint 1".to_string(),
6994 }]),
6995 ..Default::default()
6996 },
6997 lsp::Diagnostic {
6998 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6999 severity: Some(DiagnosticSeverity::HINT),
7000 message: "error 1 hint 1".to_string(),
7001 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7002 location: lsp::Location {
7003 uri: buffer_uri.clone(),
7004 range: lsp::Range::new(
7005 lsp::Position::new(1, 8),
7006 lsp::Position::new(1, 9),
7007 ),
7008 },
7009 message: "original diagnostic".to_string(),
7010 }]),
7011 ..Default::default()
7012 },
7013 lsp::Diagnostic {
7014 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7015 severity: Some(DiagnosticSeverity::ERROR),
7016 message: "error 2".to_string(),
7017 related_information: Some(vec![
7018 lsp::DiagnosticRelatedInformation {
7019 location: lsp::Location {
7020 uri: buffer_uri.clone(),
7021 range: lsp::Range::new(
7022 lsp::Position::new(1, 13),
7023 lsp::Position::new(1, 15),
7024 ),
7025 },
7026 message: "error 2 hint 1".to_string(),
7027 },
7028 lsp::DiagnosticRelatedInformation {
7029 location: lsp::Location {
7030 uri: buffer_uri.clone(),
7031 range: lsp::Range::new(
7032 lsp::Position::new(1, 13),
7033 lsp::Position::new(1, 15),
7034 ),
7035 },
7036 message: "error 2 hint 2".to_string(),
7037 },
7038 ]),
7039 ..Default::default()
7040 },
7041 lsp::Diagnostic {
7042 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7043 severity: Some(DiagnosticSeverity::HINT),
7044 message: "error 2 hint 1".to_string(),
7045 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7046 location: lsp::Location {
7047 uri: buffer_uri.clone(),
7048 range: lsp::Range::new(
7049 lsp::Position::new(2, 8),
7050 lsp::Position::new(2, 17),
7051 ),
7052 },
7053 message: "original diagnostic".to_string(),
7054 }]),
7055 ..Default::default()
7056 },
7057 lsp::Diagnostic {
7058 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7059 severity: Some(DiagnosticSeverity::HINT),
7060 message: "error 2 hint 2".to_string(),
7061 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7062 location: lsp::Location {
7063 uri: buffer_uri.clone(),
7064 range: lsp::Range::new(
7065 lsp::Position::new(2, 8),
7066 lsp::Position::new(2, 17),
7067 ),
7068 },
7069 message: "original diagnostic".to_string(),
7070 }]),
7071 ..Default::default()
7072 },
7073 ],
7074 version: None,
7075 };
7076
7077 project
7078 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7079 .unwrap();
7080 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7081
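        // Entries in a group share a group id, and the top-level diagnostics are marked as primary.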
7082 assert_eq!(
7083 buffer
7084 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7085 .collect::<Vec<_>>(),
7086 &[
7087 DiagnosticEntry {
7088 range: Point::new(1, 8)..Point::new(1, 9),
7089 diagnostic: Diagnostic {
7090 severity: DiagnosticSeverity::WARNING,
7091 message: "error 1".to_string(),
7092 group_id: 0,
7093 is_primary: true,
7094 ..Default::default()
7095 }
7096 },
7097 DiagnosticEntry {
7098 range: Point::new(1, 8)..Point::new(1, 9),
7099 diagnostic: Diagnostic {
7100 severity: DiagnosticSeverity::HINT,
7101 message: "error 1 hint 1".to_string(),
7102 group_id: 0,
7103 is_primary: false,
7104 ..Default::default()
7105 }
7106 },
7107 DiagnosticEntry {
7108 range: Point::new(1, 13)..Point::new(1, 15),
7109 diagnostic: Diagnostic {
7110 severity: DiagnosticSeverity::HINT,
7111 message: "error 2 hint 1".to_string(),
7112 group_id: 1,
7113 is_primary: false,
7114 ..Default::default()
7115 }
7116 },
7117 DiagnosticEntry {
7118 range: Point::new(1, 13)..Point::new(1, 15),
7119 diagnostic: Diagnostic {
7120 severity: DiagnosticSeverity::HINT,
7121 message: "error 2 hint 2".to_string(),
7122 group_id: 1,
7123 is_primary: false,
7124 ..Default::default()
7125 }
7126 },
7127 DiagnosticEntry {
7128 range: Point::new(2, 8)..Point::new(2, 17),
7129 diagnostic: Diagnostic {
7130 severity: DiagnosticSeverity::ERROR,
7131 message: "error 2".to_string(),
7132 group_id: 1,
7133 is_primary: true,
7134 ..Default::default()
7135 }
7136 }
7137 ]
7138 );
7139
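        // Each diagnostic group can also be retrieved on its own by group id.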
7140 assert_eq!(
7141 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7142 &[
7143 DiagnosticEntry {
7144 range: Point::new(1, 8)..Point::new(1, 9),
7145 diagnostic: Diagnostic {
7146 severity: DiagnosticSeverity::WARNING,
7147 message: "error 1".to_string(),
7148 group_id: 0,
7149 is_primary: true,
7150 ..Default::default()
7151 }
7152 },
7153 DiagnosticEntry {
7154 range: Point::new(1, 8)..Point::new(1, 9),
7155 diagnostic: Diagnostic {
7156 severity: DiagnosticSeverity::HINT,
7157 message: "error 1 hint 1".to_string(),
7158 group_id: 0,
7159 is_primary: false,
7160 ..Default::default()
7161 }
7162 },
7163 ]
7164 );
7165 assert_eq!(
7166 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7167 &[
7168 DiagnosticEntry {
7169 range: Point::new(1, 13)..Point::new(1, 15),
7170 diagnostic: Diagnostic {
7171 severity: DiagnosticSeverity::HINT,
7172 message: "error 2 hint 1".to_string(),
7173 group_id: 1,
7174 is_primary: false,
7175 ..Default::default()
7176 }
7177 },
7178 DiagnosticEntry {
7179 range: Point::new(1, 13)..Point::new(1, 15),
7180 diagnostic: Diagnostic {
7181 severity: DiagnosticSeverity::HINT,
7182 message: "error 2 hint 2".to_string(),
7183 group_id: 1,
7184 is_primary: false,
7185 ..Default::default()
7186 }
7187 },
7188 DiagnosticEntry {
7189 range: Point::new(2, 8)..Point::new(2, 17),
7190 diagnostic: Diagnostic {
7191 severity: DiagnosticSeverity::ERROR,
7192 message: "error 2".to_string(),
7193 group_id: 1,
7194 is_primary: true,
7195 ..Default::default()
7196 }
7197 }
7198 ]
7199 );
7200 }
7201
7202 #[gpui::test]
7203 async fn test_rename(cx: &mut gpui::TestAppContext) {
7204 cx.foreground().forbid_parking();
7205
7206 let mut language = Language::new(
7207 LanguageConfig {
7208 name: "Rust".into(),
7209 path_suffixes: vec!["rs".to_string()],
7210 ..Default::default()
7211 },
7212 Some(tree_sitter_rust::language()),
7213 );
7214 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7215
7216 let fs = FakeFs::new(cx.background());
7217 fs.insert_tree(
7218 "/dir",
7219 json!({
7220 "one.rs": "const ONE: usize = 1;",
7221 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7222 }),
7223 )
7224 .await;
7225
7226 let project = Project::test(fs.clone(), cx);
7227 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7228
7229 let (tree, _) = project
7230 .update(cx, |project, cx| {
7231 project.find_or_create_local_worktree("/dir", true, cx)
7232 })
7233 .await
7234 .unwrap();
7235 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7236 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7237 .await;
7238
7239 let buffer = project
7240 .update(cx, |project, cx| {
7241 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7242 })
7243 .await
7244 .unwrap();
7245
7246 let fake_server = fake_servers.next().await.unwrap();
7247
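        // Prepare the rename to determine the range of the symbol under the cursor.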
7248 let response = project.update(cx, |project, cx| {
7249 project.prepare_rename(buffer.clone(), 7, cx)
7250 });
7251 fake_server
7252 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7253 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7254 assert_eq!(params.position, lsp::Position::new(0, 7));
7255 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7256 lsp::Position::new(0, 6),
7257 lsp::Position::new(0, 9),
7258 ))))
7259 })
7260 .next()
7261 .await
7262 .unwrap();
7263 let range = response.await.unwrap().unwrap();
7264 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7265 assert_eq!(range, 6..9);
7266
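        // Perform the rename; the resulting workspace edit spans both files.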
7267 let response = project.update(cx, |project, cx| {
7268 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7269 });
7270 fake_server
7271 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7272 assert_eq!(
7273 params.text_document_position.text_document.uri.as_str(),
7274 "file:///dir/one.rs"
7275 );
7276 assert_eq!(
7277 params.text_document_position.position,
7278 lsp::Position::new(0, 7)
7279 );
7280 assert_eq!(params.new_name, "THREE");
7281 Ok(Some(lsp::WorkspaceEdit {
7282 changes: Some(
7283 [
7284 (
7285 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7286 vec![lsp::TextEdit::new(
7287 lsp::Range::new(
7288 lsp::Position::new(0, 6),
7289 lsp::Position::new(0, 9),
7290 ),
7291 "THREE".to_string(),
7292 )],
7293 ),
7294 (
7295 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7296 vec![
7297 lsp::TextEdit::new(
7298 lsp::Range::new(
7299 lsp::Position::new(0, 24),
7300 lsp::Position::new(0, 27),
7301 ),
7302 "THREE".to_string(),
7303 ),
7304 lsp::TextEdit::new(
7305 lsp::Range::new(
7306 lsp::Position::new(0, 35),
7307 lsp::Position::new(0, 38),
7308 ),
7309 "THREE".to_string(),
7310 ),
7311 ],
7312 ),
7313 ]
7314 .into_iter()
7315 .collect(),
7316 ),
7317 ..Default::default()
7318 }))
7319 })
7320 .next()
7321 .await
7322 .unwrap();
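        // The project transaction contains one entry per buffer that was edited.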
7323 let mut transaction = response.await.unwrap().0;
7324 assert_eq!(transaction.len(), 2);
7325 assert_eq!(
7326 transaction
7327 .remove_entry(&buffer)
7328 .unwrap()
7329 .0
7330 .read_with(cx, |buffer, _| buffer.text()),
7331 "const THREE: usize = 1;"
7332 );
7333 assert_eq!(
7334 transaction
7335 .into_keys()
7336 .next()
7337 .unwrap()
7338 .read_with(cx, |buffer, _| buffer.text()),
7339 "const TWO: usize = one::THREE + one::THREE;"
7340 );
7341 }
7342
7343 #[gpui::test]
7344 async fn test_search(cx: &mut gpui::TestAppContext) {
7345 let fs = FakeFs::new(cx.background());
7346 fs.insert_tree(
7347 "/dir",
7348 json!({
7349 "one.rs": "const ONE: usize = 1;",
7350 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7351 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7352 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7353 }),
7354 )
7355 .await;
7356 let project = Project::test(fs.clone(), cx);
7357 let (tree, _) = project
7358 .update(cx, |project, cx| {
7359 project.find_or_create_local_worktree("/dir", true, cx)
7360 })
7361 .await
7362 .unwrap();
7363 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7364 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7365 .await;
7366
7367 assert_eq!(
7368 search(&project, SearchQuery::text("TWO", false, true), cx)
7369 .await
7370 .unwrap(),
7371 HashMap::from_iter([
7372 ("two.rs".to_string(), vec![6..9]),
7373 ("three.rs".to_string(), vec![37..40])
7374 ])
7375 );
7376
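        // Edit an open buffer; subsequent searches should reflect its unsaved contents.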
7377 let buffer_4 = project
7378 .update(cx, |project, cx| {
7379 project.open_buffer((worktree_id, "four.rs"), cx)
7380 })
7381 .await
7382 .unwrap();
7383 buffer_4.update(cx, |buffer, cx| {
7384 buffer.edit([20..28, 31..43], "two::TWO", cx);
7385 });
7386
7387 assert_eq!(
7388 search(&project, SearchQuery::text("TWO", false, true), cx)
7389 .await
7390 .unwrap(),
7391 HashMap::from_iter([
7392 ("two.rs".to_string(), vec![6..9]),
7393 ("three.rs".to_string(), vec![37..40]),
7394 ("four.rs".to_string(), vec![25..28, 36..39])
7395 ])
7396 );
7397
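    // Runs a project-wide search and returns the matching offset ranges keyed by file path.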
7398 async fn search(
7399 project: &ModelHandle<Project>,
7400 query: SearchQuery,
7401 cx: &mut gpui::TestAppContext,
7402 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7403 let results = project
7404 .update(cx, |project, cx| project.search(query, cx))
7405 .await?;
7406
7407 Ok(results
7408 .into_iter()
7409 .map(|(buffer, ranges)| {
7410 buffer.read_with(cx, |buffer, _| {
7411 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7412 let ranges = ranges
7413 .into_iter()
7414 .map(|range| range.to_offset(buffer))
7415 .collect::<Vec<_>>();
7416 (path, ranges)
7417 })
7418 })
7419 .collect())
7420 }
7421 }
7422}