1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161 pub info_count: usize,
162 pub hint_count: usize,
163}
164
165#[derive(Debug)]
166pub struct Location {
167 pub buffer: ModelHandle<Buffer>,
168 pub range: Range<language::Anchor>,
169}
170
171#[derive(Debug)]
172pub struct DocumentHighlight {
173 pub range: Range<language::Anchor>,
174 pub kind: DocumentHighlightKind,
175}
176
177#[derive(Clone, Debug)]
178pub struct Symbol {
179 pub source_worktree_id: WorktreeId,
180 pub worktree_id: WorktreeId,
181 pub language_server_name: LanguageServerName,
182 pub path: PathBuf,
183 pub label: CodeLabel,
184 pub name: String,
185 pub kind: lsp::SymbolKind,
186 pub range: Range<PointUtf16>,
187 pub signature: [u8; 32],
188}
189
190#[derive(Default)]
191pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 info_count: 0,
199 hint_count: 0,
200 };
201
202 for entry in diagnostics {
203 if entry.diagnostic.is_primary {
204 match entry.diagnostic.severity {
205 DiagnosticSeverity::ERROR => this.error_count += 1,
206 DiagnosticSeverity::WARNING => this.warning_count += 1,
207 DiagnosticSeverity::INFORMATION => this.info_count += 1,
208 DiagnosticSeverity::HINT => this.hint_count += 1,
209 _ => {}
210 }
211 }
212 }
213
214 this
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 info_count: self.info_count as u32,
223 hint_count: self.hint_count as u32,
224 }
225 }
226}
227
228#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
229pub struct ProjectEntryId(usize);
230
231impl ProjectEntryId {
232 pub fn new(counter: &AtomicUsize) -> Self {
233 Self(counter.fetch_add(1, SeqCst))
234 }
235
236 pub fn from_proto(id: u64) -> Self {
237 Self(id as usize)
238 }
239
240 pub fn to_proto(&self) -> u64 {
241 self.0 as u64
242 }
243
244 pub fn to_usize(&self) -> usize {
245 self.0
246 }
247}
248
249impl Project {
250 pub fn init(client: &Arc<Client>) {
251 client.add_model_message_handler(Self::handle_add_collaborator);
252 client.add_model_message_handler(Self::handle_buffer_reloaded);
253 client.add_model_message_handler(Self::handle_buffer_saved);
254 client.add_model_message_handler(Self::handle_start_language_server);
255 client.add_model_message_handler(Self::handle_update_language_server);
256 client.add_model_message_handler(Self::handle_remove_collaborator);
257 client.add_model_message_handler(Self::handle_register_worktree);
258 client.add_model_message_handler(Self::handle_unregister_worktree);
259 client.add_model_message_handler(Self::handle_unshare_project);
260 client.add_model_message_handler(Self::handle_update_buffer_file);
261 client.add_model_message_handler(Self::handle_update_buffer);
262 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
263 client.add_model_message_handler(Self::handle_update_worktree);
264 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
265 client.add_model_request_handler(Self::handle_apply_code_action);
266 client.add_model_request_handler(Self::handle_reload_buffers);
267 client.add_model_request_handler(Self::handle_format_buffers);
268 client.add_model_request_handler(Self::handle_get_code_actions);
269 client.add_model_request_handler(Self::handle_get_completions);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
271 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
272 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
273 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
274 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
275 client.add_model_request_handler(Self::handle_search_project);
276 client.add_model_request_handler(Self::handle_get_project_symbols);
277 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
278 client.add_model_request_handler(Self::handle_open_buffer_by_id);
279 client.add_model_request_handler(Self::handle_open_buffer_by_path);
280 client.add_model_request_handler(Self::handle_save_buffer);
281 }
282
283 pub fn local(
284 client: Arc<Client>,
285 user_store: ModelHandle<UserStore>,
286 languages: Arc<LanguageRegistry>,
287 fs: Arc<dyn Fs>,
288 cx: &mut MutableAppContext,
289 ) -> ModelHandle<Self> {
290 cx.add_model(|cx: &mut ModelContext<Self>| {
291 let (remote_id_tx, remote_id_rx) = watch::channel();
292 let _maintain_remote_id_task = cx.spawn_weak({
293 let rpc = client.clone();
294 move |this, mut cx| {
295 async move {
296 let mut status = rpc.status();
297 while let Some(status) = status.next().await {
298 if let Some(this) = this.upgrade(&cx) {
299 if status.is_connected() {
300 this.update(&mut cx, |this, cx| this.register(cx)).await?;
301 } else {
302 this.update(&mut cx, |this, cx| this.unregister(cx));
303 }
304 }
305 }
306 Ok(())
307 }
308 .log_err()
309 }
310 });
311
312 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
313 Self {
314 worktrees: Default::default(),
315 collaborators: Default::default(),
316 opened_buffers: Default::default(),
317 shared_buffers: Default::default(),
318 loading_buffers: Default::default(),
319 loading_local_worktrees: Default::default(),
320 buffer_snapshots: Default::default(),
321 client_state: ProjectClientState::Local {
322 is_shared: false,
323 remote_id_tx,
324 remote_id_rx,
325 _maintain_remote_id_task,
326 },
327 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
328 subscriptions: Vec::new(),
329 active_entry: None,
330 languages,
331 client,
332 user_store,
333 fs,
334 next_entry_id: Default::default(),
335 language_servers: Default::default(),
336 started_language_servers: Default::default(),
337 language_server_statuses: Default::default(),
338 last_workspace_edits_by_language_server: Default::default(),
339 language_server_settings: Default::default(),
340 next_language_server_id: 0,
341 nonce: StdRng::from_entropy().gen(),
342 }
343 })
344 }
345
346 pub async fn remote(
347 remote_id: u64,
348 client: Arc<Client>,
349 user_store: ModelHandle<UserStore>,
350 languages: Arc<LanguageRegistry>,
351 fs: Arc<dyn Fs>,
352 cx: &mut AsyncAppContext,
353 ) -> Result<ModelHandle<Self>> {
354 client.authenticate_and_connect(true, &cx).await?;
355
356 let response = client
357 .request(proto::JoinProject {
358 project_id: remote_id,
359 })
360 .await?;
361
362 let replica_id = response.replica_id as ReplicaId;
363
364 let mut worktrees = Vec::new();
365 for worktree in response.worktrees {
366 let (worktree, load_task) = cx
367 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
368 worktrees.push(worktree);
369 load_task.detach();
370 }
371
372 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
373 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
374 let mut this = Self {
375 worktrees: Vec::new(),
376 loading_buffers: Default::default(),
377 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
378 shared_buffers: Default::default(),
379 loading_local_worktrees: Default::default(),
380 active_entry: None,
381 collaborators: Default::default(),
382 languages,
383 user_store: user_store.clone(),
384 fs,
385 next_entry_id: Default::default(),
386 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
387 client: client.clone(),
388 client_state: ProjectClientState::Remote {
389 sharing_has_stopped: false,
390 remote_id,
391 replica_id,
392 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
393 async move {
394 let mut status = client.status();
395 let is_connected =
396 status.next().await.map_or(false, |s| s.is_connected());
397 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
398 if !is_connected || status.next().await.is_some() {
399 if let Some(this) = this.upgrade(&cx) {
400 this.update(&mut cx, |this, cx| this.project_unshared(cx))
401 }
402 }
403 Ok(())
404 }
405 .log_err()
406 }),
407 },
408 language_servers: Default::default(),
409 started_language_servers: Default::default(),
410 language_server_settings: Default::default(),
411 language_server_statuses: response
412 .language_servers
413 .into_iter()
414 .map(|server| {
415 (
416 server.id as usize,
417 LanguageServerStatus {
418 name: server.name,
419 pending_work: Default::default(),
420 pending_diagnostic_updates: 0,
421 },
422 )
423 })
424 .collect(),
425 last_workspace_edits_by_language_server: Default::default(),
426 next_language_server_id: 0,
427 opened_buffers: Default::default(),
428 buffer_snapshots: Default::default(),
429 nonce: StdRng::from_entropy().gen(),
430 };
431 for worktree in worktrees {
432 this.add_worktree(&worktree, cx);
433 }
434 this
435 });
436
437 let user_ids = response
438 .collaborators
439 .iter()
440 .map(|peer| peer.user_id)
441 .collect();
442 user_store
443 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
444 .await?;
445 let mut collaborators = HashMap::default();
446 for message in response.collaborators {
447 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
448 collaborators.insert(collaborator.peer_id, collaborator);
449 }
450
451 this.update(cx, |this, _| {
452 this.collaborators = collaborators;
453 });
454
455 Ok(this)
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
465 }
466
467 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
468 self.opened_buffers
469 .get(&remote_id)
470 .and_then(|buffer| buffer.upgrade(cx))
471 }
472
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
535 self.unshare(cx);
536 for worktree in &self.worktrees {
537 if let Some(worktree) = worktree.upgrade(cx) {
538 worktree.update(cx, |worktree, _| {
539 worktree.as_local_mut().unwrap().unregister();
540 });
541 }
542 }
543
544 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
545 *remote_id_tx.borrow_mut() = None;
546 }
547
548 self.subscriptions.clear();
549 }
550
551 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
552 self.unregister(cx);
553
554 let response = self.client.request(proto::RegisterProject {});
555 cx.spawn(|this, mut cx| async move {
556 let remote_id = response.await?.project_id;
557
558 let mut registrations = Vec::new();
559 this.update(&mut cx, |this, cx| {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
561 *remote_id_tx.borrow_mut() = Some(remote_id);
562 }
563
564 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
565
566 this.subscriptions
567 .push(this.client.add_model_for_remote_entity(remote_id, cx));
568
569 for worktree in &this.worktrees {
570 if let Some(worktree) = worktree.upgrade(cx) {
571 registrations.push(worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.register(remote_id, cx)
574 }));
575 }
576 }
577 });
578
579 futures::future::try_join_all(registrations).await?;
580 Ok(())
581 })
582 }
583
584 pub fn remote_id(&self) -> Option<u64> {
585 match &self.client_state {
586 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
587 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
588 }
589 }
590
591 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
592 let mut id = None;
593 let mut watch = None;
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
596 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
597 }
598
599 async move {
600 if let Some(id) = id {
601 return id;
602 }
603 let mut watch = watch.unwrap();
604 loop {
605 let id = *watch.borrow();
606 if let Some(id) = id {
607 return id;
608 }
609 watch.next().await;
610 }
611 }
612 }
613
614 pub fn replica_id(&self) -> ReplicaId {
615 match &self.client_state {
616 ProjectClientState::Local { .. } => 0,
617 ProjectClientState::Remote { replica_id, .. } => *replica_id,
618 }
619 }
620
621 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
622 &self.collaborators
623 }
624
625 pub fn worktrees<'a>(
626 &'a self,
627 cx: &'a AppContext,
628 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
629 self.worktrees
630 .iter()
631 .filter_map(move |worktree| worktree.upgrade(cx))
632 }
633
634 pub fn visible_worktrees<'a>(
635 &'a self,
636 cx: &'a AppContext,
637 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
638 self.worktrees.iter().filter_map(|worktree| {
639 worktree.upgrade(cx).and_then(|worktree| {
640 if worktree.read(cx).is_visible() {
641 Some(worktree)
642 } else {
643 None
644 }
645 })
646 })
647 }
648
649 pub fn worktree_for_id(
650 &self,
651 id: WorktreeId,
652 cx: &AppContext,
653 ) -> Option<ModelHandle<Worktree>> {
654 self.worktrees(cx)
655 .find(|worktree| worktree.read(cx).id() == id)
656 }
657
658 pub fn worktree_for_entry(
659 &self,
660 entry_id: ProjectEntryId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
665 }
666
667 pub fn worktree_id_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<WorktreeId> {
672 self.worktree_for_entry(entry_id, cx)
673 .map(|worktree| worktree.read(cx).id())
674 }
675
676 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
677 let rpc = self.client.clone();
678 cx.spawn(|this, mut cx| async move {
679 let project_id = this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local {
681 is_shared,
682 remote_id_rx,
683 ..
684 } = &mut this.client_state
685 {
686 *is_shared = true;
687
688 for open_buffer in this.opened_buffers.values_mut() {
689 match open_buffer {
690 OpenBuffer::Strong(_) => {}
691 OpenBuffer::Weak(buffer) => {
692 if let Some(buffer) = buffer.upgrade(cx) {
693 *open_buffer = OpenBuffer::Strong(buffer);
694 }
695 }
696 OpenBuffer::Loading(_) => unreachable!(),
697 }
698 }
699
700 for worktree_handle in this.worktrees.iter_mut() {
701 match worktree_handle {
702 WorktreeHandle::Strong(_) => {}
703 WorktreeHandle::Weak(worktree) => {
704 if let Some(worktree) = worktree.upgrade(cx) {
705 *worktree_handle = WorktreeHandle::Strong(worktree);
706 }
707 }
708 }
709 }
710
711 remote_id_rx
712 .borrow()
713 .ok_or_else(|| anyhow!("no project id"))
714 } else {
715 Err(anyhow!("can't share a remote project"))
716 }
717 })?;
718
719 rpc.request(proto::ShareProject { project_id }).await?;
720
721 let mut tasks = Vec::new();
722 this.update(&mut cx, |this, cx| {
723 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
724 worktree.update(cx, |worktree, cx| {
725 let worktree = worktree.as_local_mut().unwrap();
726 tasks.push(worktree.share(project_id, cx));
727 });
728 }
729 });
730 for task in tasks {
731 task.await?;
732 }
733 this.update(&mut cx, |_, cx| cx.notify());
734 Ok(())
735 })
736 }
737
738 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
739 let rpc = self.client.clone();
740
741 if let ProjectClientState::Local {
742 is_shared,
743 remote_id_rx,
744 ..
745 } = &mut self.client_state
746 {
747 if !*is_shared {
748 return;
749 }
750
751 *is_shared = false;
752 self.collaborators.clear();
753 self.shared_buffers.clear();
754 for worktree_handle in self.worktrees.iter_mut() {
755 if let WorktreeHandle::Strong(worktree) = worktree_handle {
756 let is_visible = worktree.update(cx, |worktree, _| {
757 worktree.as_local_mut().unwrap().unshare();
758 worktree.is_visible()
759 });
760 if !is_visible {
761 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
762 }
763 }
764 }
765
766 for open_buffer in self.opened_buffers.values_mut() {
767 match open_buffer {
768 OpenBuffer::Strong(buffer) => {
769 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
770 }
771 _ => {}
772 }
773 }
774
775 if let Some(project_id) = *remote_id_rx.borrow() {
776 rpc.send(proto::UnshareProject { project_id }).log_err();
777 }
778
779 cx.notify();
780 } else {
781 log::error!("attempted to unshare a remote project");
782 }
783 }
784
785 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
786 if let ProjectClientState::Remote {
787 sharing_has_stopped,
788 ..
789 } = &mut self.client_state
790 {
791 *sharing_has_stopped = true;
792 self.collaborators.clear();
793 cx.notify();
794 }
795 }
796
797 pub fn is_read_only(&self) -> bool {
798 match &self.client_state {
799 ProjectClientState::Local { .. } => false,
800 ProjectClientState::Remote {
801 sharing_has_stopped,
802 ..
803 } => *sharing_has_stopped,
804 }
805 }
806
807 pub fn is_local(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => true,
810 ProjectClientState::Remote { .. } => false,
811 }
812 }
813
814 pub fn is_remote(&self) -> bool {
815 !self.is_local()
816 }
817
818 pub fn create_buffer(
819 &mut self,
820 text: &str,
821 language: Option<Arc<Language>>,
822 cx: &mut ModelContext<Self>,
823 ) -> Result<ModelHandle<Buffer>> {
824 if self.is_remote() {
825 return Err(anyhow!("creating buffers as a guest is not supported yet"));
826 }
827
828 let buffer = cx.add_model(|cx| {
829 Buffer::new(self.replica_id(), text, cx)
830 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
831 });
832 self.register_buffer(&buffer, cx)?;
833 Ok(buffer)
834 }
835
836 pub fn open_path(
837 &mut self,
838 path: impl Into<ProjectPath>,
839 cx: &mut ModelContext<Self>,
840 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
841 let task = self.open_buffer(path, cx);
842 cx.spawn_weak(|_, cx| async move {
843 let buffer = task.await?;
844 let project_entry_id = buffer
845 .read_with(&cx, |buffer, cx| {
846 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
847 })
848 .ok_or_else(|| anyhow!("no project entry"))?;
849 Ok((project_entry_id, buffer.into()))
850 })
851 }
852
853 pub fn open_buffer(
854 &mut self,
855 path: impl Into<ProjectPath>,
856 cx: &mut ModelContext<Self>,
857 ) -> Task<Result<ModelHandle<Buffer>>> {
858 let project_path = path.into();
859 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
860 worktree
861 } else {
862 return Task::ready(Err(anyhow!("no such worktree")));
863 };
864
865 // If there is already a buffer for the given path, then return it.
866 let existing_buffer = self.get_open_buffer(&project_path, cx);
867 if let Some(existing_buffer) = existing_buffer {
868 return Task::ready(Ok(existing_buffer));
869 }
870
871 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
872 // If the given path is already being loaded, then wait for that existing
873 // task to complete and return the same buffer.
874 hash_map::Entry::Occupied(e) => e.get().clone(),
875
876 // Otherwise, record the fact that this path is now being loaded.
877 hash_map::Entry::Vacant(entry) => {
878 let (mut tx, rx) = postage::watch::channel();
879 entry.insert(rx.clone());
880
881 let load_buffer = if worktree.read(cx).is_local() {
882 self.open_local_buffer(&project_path.path, &worktree, cx)
883 } else {
884 self.open_remote_buffer(&project_path.path, &worktree, cx)
885 };
886
887 cx.spawn(move |this, mut cx| async move {
888 let load_result = load_buffer.await;
889 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
890 // Record the fact that the buffer is no longer loading.
891 this.loading_buffers.remove(&project_path);
892 let buffer = load_result.map_err(Arc::new)?;
893 Ok(buffer)
894 }));
895 })
896 .detach();
897 rx
898 }
899 };
900
901 cx.foreground().spawn(async move {
902 loop {
903 if let Some(result) = loading_watch.borrow().as_ref() {
904 match result {
905 Ok(buffer) => return Ok(buffer.clone()),
906 Err(error) => return Err(anyhow!("{}", error)),
907 }
908 }
909 loading_watch.next().await;
910 }
911 })
912 }
913
914 fn open_local_buffer(
915 &mut self,
916 path: &Arc<Path>,
917 worktree: &ModelHandle<Worktree>,
918 cx: &mut ModelContext<Self>,
919 ) -> Task<Result<ModelHandle<Buffer>>> {
920 let load_buffer = worktree.update(cx, |worktree, cx| {
921 let worktree = worktree.as_local_mut().unwrap();
922 worktree.load_buffer(path, cx)
923 });
924 cx.spawn(|this, mut cx| async move {
925 let buffer = load_buffer.await?;
926 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
927 Ok(buffer)
928 })
929 }
930
931 fn open_remote_buffer(
932 &mut self,
933 path: &Arc<Path>,
934 worktree: &ModelHandle<Worktree>,
935 cx: &mut ModelContext<Self>,
936 ) -> Task<Result<ModelHandle<Buffer>>> {
937 let rpc = self.client.clone();
938 let project_id = self.remote_id().unwrap();
939 let remote_worktree_id = worktree.read(cx).id();
940 let path = path.clone();
941 let path_string = path.to_string_lossy().to_string();
942 cx.spawn(|this, mut cx| async move {
943 let response = rpc
944 .request(proto::OpenBufferByPath {
945 project_id,
946 worktree_id: remote_worktree_id.to_proto(),
947 path: path_string,
948 })
949 .await?;
950 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
951 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
952 .await
953 })
954 }
955
956 fn open_local_buffer_via_lsp(
957 &mut self,
958 abs_path: lsp::Url,
959 lsp_adapter: Arc<dyn LspAdapter>,
960 lsp_server: Arc<LanguageServer>,
961 cx: &mut ModelContext<Self>,
962 ) -> Task<Result<ModelHandle<Buffer>>> {
963 cx.spawn(|this, mut cx| async move {
964 let abs_path = abs_path
965 .to_file_path()
966 .map_err(|_| anyhow!("can't convert URI to path"))?;
967 let (worktree, relative_path) = if let Some(result) =
968 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
969 {
970 result
971 } else {
972 let worktree = this
973 .update(&mut cx, |this, cx| {
974 this.create_local_worktree(&abs_path, false, cx)
975 })
976 .await?;
977 this.update(&mut cx, |this, cx| {
978 this.language_servers.insert(
979 (worktree.read(cx).id(), lsp_adapter.name()),
980 (lsp_adapter, lsp_server),
981 );
982 });
983 (worktree, PathBuf::new())
984 };
985
986 let project_path = ProjectPath {
987 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
988 path: relative_path.into(),
989 };
990 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
991 .await
992 })
993 }
994
995 pub fn open_buffer_by_id(
996 &mut self,
997 id: u64,
998 cx: &mut ModelContext<Self>,
999 ) -> Task<Result<ModelHandle<Buffer>>> {
1000 if let Some(buffer) = self.buffer_for_id(id, cx) {
1001 Task::ready(Ok(buffer))
1002 } else if self.is_local() {
1003 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1004 } else if let Some(project_id) = self.remote_id() {
1005 let request = self
1006 .client
1007 .request(proto::OpenBufferById { project_id, id });
1008 cx.spawn(|this, mut cx| async move {
1009 let buffer = request
1010 .await?
1011 .buffer
1012 .ok_or_else(|| anyhow!("invalid buffer"))?;
1013 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1014 .await
1015 })
1016 } else {
1017 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1018 }
1019 }
1020
1021 pub fn save_buffer_as(
1022 &mut self,
1023 buffer: ModelHandle<Buffer>,
1024 abs_path: PathBuf,
1025 cx: &mut ModelContext<Project>,
1026 ) -> Task<Result<()>> {
1027 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1028 let old_path =
1029 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1030 cx.spawn(|this, mut cx| async move {
1031 if let Some(old_path) = old_path {
1032 this.update(&mut cx, |this, cx| {
1033 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1034 });
1035 }
1036 let (worktree, path) = worktree_task.await?;
1037 worktree
1038 .update(&mut cx, |worktree, cx| {
1039 worktree
1040 .as_local_mut()
1041 .unwrap()
1042 .save_buffer_as(buffer.clone(), path, cx)
1043 })
1044 .await?;
1045 this.update(&mut cx, |this, cx| {
1046 this.assign_language_to_buffer(&buffer, cx);
1047 this.register_buffer_with_language_server(&buffer, cx);
1048 });
1049 Ok(())
1050 })
1051 }
1052
1053 pub fn get_open_buffer(
1054 &mut self,
1055 path: &ProjectPath,
1056 cx: &mut ModelContext<Self>,
1057 ) -> Option<ModelHandle<Buffer>> {
1058 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1059 self.opened_buffers.values().find_map(|buffer| {
1060 let buffer = buffer.upgrade(cx)?;
1061 let file = File::from_dyn(buffer.read(cx).file())?;
1062 if file.worktree == worktree && file.path() == &path.path {
1063 Some(buffer)
1064 } else {
1065 None
1066 }
1067 })
1068 }
1069
1070 fn register_buffer(
1071 &mut self,
1072 buffer: &ModelHandle<Buffer>,
1073 cx: &mut ModelContext<Self>,
1074 ) -> Result<()> {
1075 let remote_id = buffer.read(cx).remote_id();
1076 let open_buffer = if self.is_remote() || self.is_shared() {
1077 OpenBuffer::Strong(buffer.clone())
1078 } else {
1079 OpenBuffer::Weak(buffer.downgrade())
1080 };
1081
1082 match self.opened_buffers.insert(remote_id, open_buffer) {
1083 None => {}
1084 Some(OpenBuffer::Loading(operations)) => {
1085 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1086 }
1087 Some(OpenBuffer::Weak(existing_handle)) => {
1088 if existing_handle.upgrade(cx).is_some() {
1089 Err(anyhow!(
1090 "already registered buffer with remote id {}",
1091 remote_id
1092 ))?
1093 }
1094 }
1095 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1096 "already registered buffer with remote id {}",
1097 remote_id
1098 ))?,
1099 }
1100 cx.subscribe(buffer, |this, buffer, event, cx| {
1101 this.on_buffer_event(buffer, event, cx);
1102 })
1103 .detach();
1104
1105 self.assign_language_to_buffer(buffer, cx);
1106 self.register_buffer_with_language_server(buffer, cx);
1107 cx.observe_release(buffer, |this, buffer, cx| {
1108 if let Some(file) = File::from_dyn(buffer.file()) {
1109 if file.is_local() {
1110 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1111 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1112 server
1113 .notify::<lsp::notification::DidCloseTextDocument>(
1114 lsp::DidCloseTextDocumentParams {
1115 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1116 },
1117 )
1118 .log_err();
1119 }
1120 }
1121 }
1122 })
1123 .detach();
1124
1125 Ok(())
1126 }
1127
1128 fn register_buffer_with_language_server(
1129 &mut self,
1130 buffer_handle: &ModelHandle<Buffer>,
1131 cx: &mut ModelContext<Self>,
1132 ) {
1133 let buffer = buffer_handle.read(cx);
1134 let buffer_id = buffer.remote_id();
1135 if let Some(file) = File::from_dyn(buffer.file()) {
1136 if file.is_local() {
1137 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1138 let initial_snapshot = buffer.text_snapshot();
1139 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1140
1141 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1142 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1143 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1144 .log_err();
1145 }
1146 }
1147
1148 if let Some((_, server)) = language_server {
1149 server
1150 .notify::<lsp::notification::DidOpenTextDocument>(
1151 lsp::DidOpenTextDocumentParams {
1152 text_document: lsp::TextDocumentItem::new(
1153 uri,
1154 Default::default(),
1155 0,
1156 initial_snapshot.text(),
1157 ),
1158 }
1159 .clone(),
1160 )
1161 .log_err();
1162 buffer_handle.update(cx, |buffer, cx| {
1163 buffer.set_completion_triggers(
1164 server
1165 .capabilities()
1166 .completion_provider
1167 .as_ref()
1168 .and_then(|provider| provider.trigger_characters.clone())
1169 .unwrap_or(Vec::new()),
1170 cx,
1171 )
1172 });
1173 self.buffer_snapshots
1174 .insert(buffer_id, vec![(0, initial_snapshot)]);
1175 }
1176 }
1177 }
1178 }
1179
1180 fn unregister_buffer_from_language_server(
1181 &mut self,
1182 buffer: &ModelHandle<Buffer>,
1183 old_path: PathBuf,
1184 cx: &mut ModelContext<Self>,
1185 ) {
1186 buffer.update(cx, |buffer, cx| {
1187 buffer.update_diagnostics(Default::default(), cx);
1188 self.buffer_snapshots.remove(&buffer.remote_id());
1189 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1190 language_server
1191 .notify::<lsp::notification::DidCloseTextDocument>(
1192 lsp::DidCloseTextDocumentParams {
1193 text_document: lsp::TextDocumentIdentifier::new(
1194 lsp::Url::from_file_path(old_path).unwrap(),
1195 ),
1196 },
1197 )
1198 .log_err();
1199 }
1200 });
1201 }
1202
1203 fn on_buffer_event(
1204 &mut self,
1205 buffer: ModelHandle<Buffer>,
1206 event: &BufferEvent,
1207 cx: &mut ModelContext<Self>,
1208 ) -> Option<()> {
1209 match event {
1210 BufferEvent::Operation(operation) => {
1211 let project_id = self.remote_id()?;
1212 let request = self.client.request(proto::UpdateBuffer {
1213 project_id,
1214 buffer_id: buffer.read(cx).remote_id(),
1215 operations: vec![language::proto::serialize_operation(&operation)],
1216 });
1217 cx.background().spawn(request).detach_and_log_err(cx);
1218 }
1219 BufferEvent::Edited { .. } => {
1220 let (_, language_server) = self
1221 .language_server_for_buffer(buffer.read(cx), cx)?
1222 .clone();
1223 let buffer = buffer.read(cx);
1224 let file = File::from_dyn(buffer.file())?;
1225 let abs_path = file.as_local()?.abs_path(cx);
1226 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1227 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1228 let (version, prev_snapshot) = buffer_snapshots.last()?;
1229 let next_snapshot = buffer.text_snapshot();
1230 let next_version = version + 1;
1231
1232 let content_changes = buffer
1233 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1234 .map(|edit| {
1235 let edit_start = edit.new.start.0;
1236 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1237 let new_text = next_snapshot
1238 .text_for_range(edit.new.start.1..edit.new.end.1)
1239 .collect();
1240 lsp::TextDocumentContentChangeEvent {
1241 range: Some(lsp::Range::new(
1242 point_to_lsp(edit_start),
1243 point_to_lsp(edit_end),
1244 )),
1245 range_length: None,
1246 text: new_text,
1247 }
1248 })
1249 .collect();
1250
1251 buffer_snapshots.push((next_version, next_snapshot));
1252
1253 language_server
1254 .notify::<lsp::notification::DidChangeTextDocument>(
1255 lsp::DidChangeTextDocumentParams {
1256 text_document: lsp::VersionedTextDocumentIdentifier::new(
1257 uri,
1258 next_version,
1259 ),
1260 content_changes,
1261 },
1262 )
1263 .log_err();
1264 }
1265 BufferEvent::Saved => {
1266 let file = File::from_dyn(buffer.read(cx).file())?;
1267 let worktree_id = file.worktree_id(cx);
1268 let abs_path = file.as_local()?.abs_path(cx);
1269 let text_document = lsp::TextDocumentIdentifier {
1270 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1271 };
1272
1273 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1274 server
1275 .notify::<lsp::notification::DidSaveTextDocument>(
1276 lsp::DidSaveTextDocumentParams {
1277 text_document: text_document.clone(),
1278 text: None,
1279 },
1280 )
1281 .log_err();
1282 }
1283 }
1284 _ => {}
1285 }
1286
1287 None
1288 }
1289
1290 fn language_servers_for_worktree(
1291 &self,
1292 worktree_id: WorktreeId,
1293 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1294 self.language_servers.iter().filter_map(
1295 move |((language_server_worktree_id, _), server)| {
1296 if *language_server_worktree_id == worktree_id {
1297 Some(server)
1298 } else {
1299 None
1300 }
1301 },
1302 )
1303 }
1304
1305 fn assign_language_to_buffer(
1306 &mut self,
1307 buffer: &ModelHandle<Buffer>,
1308 cx: &mut ModelContext<Self>,
1309 ) -> Option<()> {
1310 // If the buffer has a language, set it and start the language server if we haven't already.
1311 let full_path = buffer.read(cx).file()?.full_path(cx);
1312 let language = self.languages.select_language(&full_path)?;
1313 buffer.update(cx, |buffer, cx| {
1314 buffer.set_language(Some(language.clone()), cx);
1315 });
1316
1317 let file = File::from_dyn(buffer.read(cx).file())?;
1318 let worktree = file.worktree.read(cx).as_local()?;
1319 let worktree_id = worktree.id();
1320 let worktree_abs_path = worktree.abs_path().clone();
1321 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1322
1323 None
1324 }
1325
1326 fn start_language_server(
1327 &mut self,
1328 worktree_id: WorktreeId,
1329 worktree_path: Arc<Path>,
1330 language: Arc<Language>,
1331 cx: &mut ModelContext<Self>,
1332 ) {
1333 let adapter = if let Some(adapter) = language.lsp_adapter() {
1334 adapter
1335 } else {
1336 return;
1337 };
1338 let key = (worktree_id, adapter.name());
1339 self.started_language_servers
1340 .entry(key.clone())
1341 .or_insert_with(|| {
1342 let server_id = post_inc(&mut self.next_language_server_id);
1343 let language_server = self.languages.start_language_server(
1344 server_id,
1345 language.clone(),
1346 worktree_path,
1347 self.client.http_client(),
1348 cx,
1349 );
1350 cx.spawn_weak(|this, mut cx| async move {
1351 let language_server = language_server?.await.log_err()?;
1352 let language_server = language_server
1353 .initialize(adapter.initialization_options())
1354 .await
1355 .log_err()?;
1356 let this = this.upgrade(&cx)?;
1357 let disk_based_diagnostics_progress_token =
1358 adapter.disk_based_diagnostics_progress_token();
1359
1360 language_server
1361 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1362 let this = this.downgrade();
1363 let adapter = adapter.clone();
1364 move |params, mut cx| {
1365 if let Some(this) = this.upgrade(&cx) {
1366 this.update(&mut cx, |this, cx| {
1367 this.on_lsp_diagnostics_published(
1368 server_id,
1369 params,
1370 &adapter,
1371 disk_based_diagnostics_progress_token,
1372 cx,
1373 );
1374 });
1375 }
1376 }
1377 })
1378 .detach();
1379
1380 language_server
1381 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1382 let settings = this
1383 .read_with(&cx, |this, _| this.language_server_settings.clone());
1384 move |params, _| {
1385 let settings = settings.lock().clone();
1386 async move {
1387 Ok(params
1388 .items
1389 .into_iter()
1390 .map(|item| {
1391 if let Some(section) = &item.section {
1392 settings
1393 .get(section)
1394 .cloned()
1395 .unwrap_or(serde_json::Value::Null)
1396 } else {
1397 settings.clone()
1398 }
1399 })
1400 .collect())
1401 }
1402 }
1403 })
1404 .detach();
1405
1406 language_server
1407 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1408 let this = this.downgrade();
1409 let adapter = adapter.clone();
1410 let language_server = language_server.clone();
1411 move |params, cx| {
1412 Self::on_lsp_workspace_edit(
1413 this,
1414 params,
1415 server_id,
1416 adapter.clone(),
1417 language_server.clone(),
1418 cx,
1419 )
1420 }
1421 })
1422 .detach();
1423
1424 language_server
1425 .on_notification::<lsp::notification::Progress, _>({
1426 let this = this.downgrade();
1427 move |params, mut cx| {
1428 if let Some(this) = this.upgrade(&cx) {
1429 this.update(&mut cx, |this, cx| {
1430 this.on_lsp_progress(
1431 params,
1432 server_id,
1433 disk_based_diagnostics_progress_token,
1434 cx,
1435 );
1436 });
1437 }
1438 }
1439 })
1440 .detach();
1441
1442 this.update(&mut cx, |this, cx| {
1443 this.language_servers
1444 .insert(key.clone(), (adapter, language_server.clone()));
1445 this.language_server_statuses.insert(
1446 server_id,
1447 LanguageServerStatus {
1448 name: language_server.name().to_string(),
1449 pending_work: Default::default(),
1450 pending_diagnostic_updates: 0,
1451 },
1452 );
1453 language_server
1454 .notify::<lsp::notification::DidChangeConfiguration>(
1455 lsp::DidChangeConfigurationParams {
1456 settings: this.language_server_settings.lock().clone(),
1457 },
1458 )
1459 .ok();
1460
1461 if let Some(project_id) = this.remote_id() {
1462 this.client
1463 .send(proto::StartLanguageServer {
1464 project_id,
1465 server: Some(proto::LanguageServer {
1466 id: server_id as u64,
1467 name: language_server.name().to_string(),
1468 }),
1469 })
1470 .log_err();
1471 }
1472
1473 // Tell the language server about every open buffer in the worktree that matches the language.
1474 for buffer in this.opened_buffers.values() {
1475 if let Some(buffer_handle) = buffer.upgrade(cx) {
1476 let buffer = buffer_handle.read(cx);
1477 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1478 file
1479 } else {
1480 continue;
1481 };
1482 let language = if let Some(language) = buffer.language() {
1483 language
1484 } else {
1485 continue;
1486 };
1487 if file.worktree.read(cx).id() != key.0
1488 || language.lsp_adapter().map(|a| a.name())
1489 != Some(key.1.clone())
1490 {
1491 continue;
1492 }
1493
1494 let file = file.as_local()?;
1495 let versions = this
1496 .buffer_snapshots
1497 .entry(buffer.remote_id())
1498 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1499 let (version, initial_snapshot) = versions.last().unwrap();
1500 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1501 language_server
1502 .notify::<lsp::notification::DidOpenTextDocument>(
1503 lsp::DidOpenTextDocumentParams {
1504 text_document: lsp::TextDocumentItem::new(
1505 uri,
1506 Default::default(),
1507 *version,
1508 initial_snapshot.text(),
1509 ),
1510 },
1511 )
1512 .log_err()?;
1513 buffer_handle.update(cx, |buffer, cx| {
1514 buffer.set_completion_triggers(
1515 language_server
1516 .capabilities()
1517 .completion_provider
1518 .as_ref()
1519 .and_then(|provider| {
1520 provider.trigger_characters.clone()
1521 })
1522 .unwrap_or(Vec::new()),
1523 cx,
1524 )
1525 });
1526 }
1527 }
1528
1529 cx.notify();
1530 Some(())
1531 });
1532
1533 Some(language_server)
1534 })
1535 });
1536 }
1537
1538 pub fn restart_language_servers_for_buffers(
1539 &mut self,
1540 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<()> {
1543 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1544 .into_iter()
1545 .filter_map(|buffer| {
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 let worktree = file.worktree.read(cx).as_local()?;
1548 let worktree_id = worktree.id();
1549 let worktree_abs_path = worktree.abs_path().clone();
1550 let full_path = file.full_path(cx);
1551 Some((worktree_id, worktree_abs_path, full_path))
1552 })
1553 .collect();
1554 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1555 let language = self.languages.select_language(&full_path)?;
1556 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1557 }
1558
1559 None
1560 }
1561
1562 fn restart_language_server(
1563 &mut self,
1564 worktree_id: WorktreeId,
1565 worktree_path: Arc<Path>,
1566 language: Arc<Language>,
1567 cx: &mut ModelContext<Self>,
1568 ) {
1569 let adapter = if let Some(adapter) = language.lsp_adapter() {
1570 adapter
1571 } else {
1572 return;
1573 };
1574 let key = (worktree_id, adapter.name());
1575 let server_to_shutdown = self.language_servers.remove(&key);
1576 self.started_language_servers.remove(&key);
1577 server_to_shutdown
1578 .as_ref()
1579 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1580 cx.spawn_weak(|this, mut cx| async move {
1581 if let Some(this) = this.upgrade(&cx) {
1582 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1583 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1584 shutdown_task.await;
1585 }
1586 }
1587
1588 this.update(&mut cx, |this, cx| {
1589 this.start_language_server(worktree_id, worktree_path, language, cx);
1590 });
1591 }
1592 })
1593 .detach();
1594 }
1595
1596 fn on_lsp_diagnostics_published(
1597 &mut self,
1598 server_id: usize,
1599 mut params: lsp::PublishDiagnosticsParams,
1600 adapter: &Arc<dyn LspAdapter>,
1601 disk_based_diagnostics_progress_token: Option<&str>,
1602 cx: &mut ModelContext<Self>,
1603 ) {
1604 adapter.process_diagnostics(&mut params);
1605 if disk_based_diagnostics_progress_token.is_none() {
1606 self.disk_based_diagnostics_started(cx);
1607 self.broadcast_language_server_update(
1608 server_id,
1609 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1610 proto::LspDiskBasedDiagnosticsUpdating {},
1611 ),
1612 );
1613 }
1614 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1615 .log_err();
1616 if disk_based_diagnostics_progress_token.is_none() {
1617 self.disk_based_diagnostics_finished(cx);
1618 self.broadcast_language_server_update(
1619 server_id,
1620 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1621 proto::LspDiskBasedDiagnosticsUpdated {},
1622 ),
1623 );
1624 }
1625 }
1626
1627 fn on_lsp_progress(
1628 &mut self,
1629 progress: lsp::ProgressParams,
1630 server_id: usize,
1631 disk_based_diagnostics_progress_token: Option<&str>,
1632 cx: &mut ModelContext<Self>,
1633 ) {
1634 let token = match progress.token {
1635 lsp::NumberOrString::String(token) => token,
1636 lsp::NumberOrString::Number(token) => {
1637 log::info!("skipping numeric progress token {}", token);
1638 return;
1639 }
1640 };
1641 let progress = match progress.value {
1642 lsp::ProgressParamsValue::WorkDone(value) => value,
1643 };
1644 let language_server_status =
1645 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1646 status
1647 } else {
1648 return;
1649 };
1650 match progress {
1651 lsp::WorkDoneProgress::Begin(_) => {
1652 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1653 language_server_status.pending_diagnostic_updates += 1;
1654 if language_server_status.pending_diagnostic_updates == 1 {
1655 self.disk_based_diagnostics_started(cx);
1656 self.broadcast_language_server_update(
1657 server_id,
1658 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1659 proto::LspDiskBasedDiagnosticsUpdating {},
1660 ),
1661 );
1662 }
1663 } else {
1664 self.on_lsp_work_start(server_id, token.clone(), cx);
1665 self.broadcast_language_server_update(
1666 server_id,
1667 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1668 token,
1669 }),
1670 );
1671 }
1672 }
1673 lsp::WorkDoneProgress::Report(report) => {
1674 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1675 self.on_lsp_work_progress(
1676 server_id,
1677 token.clone(),
1678 LanguageServerProgress {
1679 message: report.message.clone(),
1680 percentage: report.percentage.map(|p| p as usize),
1681 last_update_at: Instant::now(),
1682 },
1683 cx,
1684 );
1685 self.broadcast_language_server_update(
1686 server_id,
1687 proto::update_language_server::Variant::WorkProgress(
1688 proto::LspWorkProgress {
1689 token,
1690 message: report.message,
1691 percentage: report.percentage.map(|p| p as u32),
1692 },
1693 ),
1694 );
1695 }
1696 }
1697 lsp::WorkDoneProgress::End(_) => {
1698 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1699 language_server_status.pending_diagnostic_updates -= 1;
1700 if language_server_status.pending_diagnostic_updates == 0 {
1701 self.disk_based_diagnostics_finished(cx);
1702 self.broadcast_language_server_update(
1703 server_id,
1704 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1705 proto::LspDiskBasedDiagnosticsUpdated {},
1706 ),
1707 );
1708 }
1709 } else {
1710 self.on_lsp_work_end(server_id, token.clone(), cx);
1711 self.broadcast_language_server_update(
1712 server_id,
1713 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1714 token,
1715 }),
1716 );
1717 }
1718 }
1719 }
1720 }
1721
1722 fn on_lsp_work_start(
1723 &mut self,
1724 language_server_id: usize,
1725 token: String,
1726 cx: &mut ModelContext<Self>,
1727 ) {
1728 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1729 status.pending_work.insert(
1730 token,
1731 LanguageServerProgress {
1732 message: None,
1733 percentage: None,
1734 last_update_at: Instant::now(),
1735 },
1736 );
1737 cx.notify();
1738 }
1739 }
1740
1741 fn on_lsp_work_progress(
1742 &mut self,
1743 language_server_id: usize,
1744 token: String,
1745 progress: LanguageServerProgress,
1746 cx: &mut ModelContext<Self>,
1747 ) {
1748 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1749 status.pending_work.insert(token, progress);
1750 cx.notify();
1751 }
1752 }
1753
1754 fn on_lsp_work_end(
1755 &mut self,
1756 language_server_id: usize,
1757 token: String,
1758 cx: &mut ModelContext<Self>,
1759 ) {
1760 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1761 status.pending_work.remove(&token);
1762 cx.notify();
1763 }
1764 }
1765
1766 async fn on_lsp_workspace_edit(
1767 this: WeakModelHandle<Self>,
1768 params: lsp::ApplyWorkspaceEditParams,
1769 server_id: usize,
1770 adapter: Arc<dyn LspAdapter>,
1771 language_server: Arc<LanguageServer>,
1772 mut cx: AsyncAppContext,
1773 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1774 let this = this
1775 .upgrade(&cx)
1776 .ok_or_else(|| anyhow!("project project closed"))?;
1777 let transaction = Self::deserialize_workspace_edit(
1778 this.clone(),
1779 params.edit,
1780 true,
1781 adapter.clone(),
1782 language_server.clone(),
1783 &mut cx,
1784 )
1785 .await
1786 .log_err();
1787 this.update(&mut cx, |this, _| {
1788 if let Some(transaction) = transaction {
1789 this.last_workspace_edits_by_language_server
1790 .insert(server_id, transaction);
1791 }
1792 });
1793 Ok(lsp::ApplyWorkspaceEditResponse {
1794 applied: true,
1795 failed_change: None,
1796 failure_reason: None,
1797 })
1798 }
1799
1800 fn broadcast_language_server_update(
1801 &self,
1802 language_server_id: usize,
1803 event: proto::update_language_server::Variant,
1804 ) {
1805 if let Some(project_id) = self.remote_id() {
1806 self.client
1807 .send(proto::UpdateLanguageServer {
1808 project_id,
1809 language_server_id: language_server_id as u64,
1810 variant: Some(event),
1811 })
1812 .log_err();
1813 }
1814 }
1815
1816 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1817 for (_, server) in self.language_servers.values() {
1818 server
1819 .notify::<lsp::notification::DidChangeConfiguration>(
1820 lsp::DidChangeConfigurationParams {
1821 settings: settings.clone(),
1822 },
1823 )
1824 .ok();
1825 }
1826 *self.language_server_settings.lock() = settings;
1827 }
1828
1829 pub fn language_server_statuses(
1830 &self,
1831 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1832 self.language_server_statuses.values()
1833 }
1834
1835 pub fn update_diagnostics(
1836 &mut self,
1837 params: lsp::PublishDiagnosticsParams,
1838 disk_based_sources: &[&str],
1839 cx: &mut ModelContext<Self>,
1840 ) -> Result<()> {
1841 let abs_path = params
1842 .uri
1843 .to_file_path()
1844 .map_err(|_| anyhow!("URI is not a file"))?;
1845 let mut next_group_id = 0;
1846 let mut diagnostics = Vec::default();
1847 let mut primary_diagnostic_group_ids = HashMap::default();
1848 let mut sources_by_group_id = HashMap::default();
1849 let mut supporting_diagnostics = HashMap::default();
1850 for diagnostic in ¶ms.diagnostics {
1851 let source = diagnostic.source.as_ref();
1852 let code = diagnostic.code.as_ref().map(|code| match code {
1853 lsp::NumberOrString::Number(code) => code.to_string(),
1854 lsp::NumberOrString::String(code) => code.clone(),
1855 });
1856 let range = range_from_lsp(diagnostic.range);
1857 let is_supporting = diagnostic
1858 .related_information
1859 .as_ref()
1860 .map_or(false, |infos| {
1861 infos.iter().any(|info| {
1862 primary_diagnostic_group_ids.contains_key(&(
1863 source,
1864 code.clone(),
1865 range_from_lsp(info.location.range),
1866 ))
1867 })
1868 });
1869
1870 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1871 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1872 });
1873
1874 if is_supporting {
1875 supporting_diagnostics.insert(
1876 (source, code.clone(), range),
1877 (diagnostic.severity, is_unnecessary),
1878 );
1879 } else {
1880 let group_id = post_inc(&mut next_group_id);
1881 let is_disk_based = source.map_or(false, |source| {
1882 disk_based_sources.contains(&source.as_str())
1883 });
1884
1885 sources_by_group_id.insert(group_id, source);
1886 primary_diagnostic_group_ids
1887 .insert((source, code.clone(), range.clone()), group_id);
1888
1889 diagnostics.push(DiagnosticEntry {
1890 range,
1891 diagnostic: Diagnostic {
1892 code: code.clone(),
1893 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1894 message: diagnostic.message.clone(),
1895 group_id,
1896 is_primary: true,
1897 is_valid: true,
1898 is_disk_based,
1899 is_unnecessary,
1900 },
1901 });
1902 if let Some(infos) = &diagnostic.related_information {
1903 for info in infos {
1904 if info.location.uri == params.uri && !info.message.is_empty() {
1905 let range = range_from_lsp(info.location.range);
1906 diagnostics.push(DiagnosticEntry {
1907 range,
1908 diagnostic: Diagnostic {
1909 code: code.clone(),
1910 severity: DiagnosticSeverity::INFORMATION,
1911 message: info.message.clone(),
1912 group_id,
1913 is_primary: false,
1914 is_valid: true,
1915 is_disk_based,
1916 is_unnecessary: false,
1917 },
1918 });
1919 }
1920 }
1921 }
1922 }
1923 }
1924
1925 for entry in &mut diagnostics {
1926 let diagnostic = &mut entry.diagnostic;
1927 if !diagnostic.is_primary {
1928 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1929 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1930 source,
1931 diagnostic.code.clone(),
1932 entry.range.clone(),
1933 )) {
1934 if let Some(severity) = severity {
1935 diagnostic.severity = severity;
1936 }
1937 diagnostic.is_unnecessary = is_unnecessary;
1938 }
1939 }
1940 }
1941
1942 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1943 Ok(())
1944 }
1945
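    /// Stores the diagnostics for `abs_path` on its worktree and on the open
    /// buffer for that path, if any, then emits `Event::DiagnosticsUpdated`.
    /// Diagnostics for paths in invisible worktrees are ignored.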
1946 pub fn update_diagnostic_entries(
1947 &mut self,
1948 abs_path: PathBuf,
1949 version: Option<i32>,
1950 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1951 cx: &mut ModelContext<Project>,
1952 ) -> Result<(), anyhow::Error> {
1953 let (worktree, relative_path) = self
1954 .find_local_worktree(&abs_path, cx)
1955 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1956 if !worktree.read(cx).is_visible() {
1957 return Ok(());
1958 }
1959
1960 let project_path = ProjectPath {
1961 worktree_id: worktree.read(cx).id(),
1962 path: relative_path.into(),
1963 };
1964
1965 for buffer in self.opened_buffers.values() {
1966 if let Some(buffer) = buffer.upgrade(cx) {
1967 if buffer
1968 .read(cx)
1969 .file()
1970 .map_or(false, |file| *file.path() == project_path.path)
1971 {
1972 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1973 break;
1974 }
1975 }
1976 }
1977 worktree.update(cx, |worktree, cx| {
1978 worktree
1979 .as_local_mut()
1980 .ok_or_else(|| anyhow!("not a local worktree"))?
1981 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1982 })?;
1983 cx.emit(Event::DiagnosticsUpdated(project_path));
1984 Ok(())
1985 }
1986
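    /// Clips the incoming diagnostics against the buffer snapshot that
    /// corresponds to the reported LSP version. Disk-based diagnostics are
    /// first translated through any unsaved edits, and empty ranges are
    /// expanded to cover at least one character before the resulting
    /// `DiagnosticSet` is installed on the buffer.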
1987 fn update_buffer_diagnostics(
1988 &mut self,
1989 buffer: &ModelHandle<Buffer>,
1990 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1991 version: Option<i32>,
1992 cx: &mut ModelContext<Self>,
1993 ) -> Result<()> {
1994 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1995 Ordering::Equal
1996 .then_with(|| b.is_primary.cmp(&a.is_primary))
1997 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1998 .then_with(|| a.severity.cmp(&b.severity))
1999 .then_with(|| a.message.cmp(&b.message))
2000 }
2001
2002 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2003
2004 diagnostics.sort_unstable_by(|a, b| {
2005 Ordering::Equal
2006 .then_with(|| a.range.start.cmp(&b.range.start))
2007 .then_with(|| b.range.end.cmp(&a.range.end))
2008 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2009 });
2010
2011 let mut sanitized_diagnostics = Vec::new();
2012 let edits_since_save = Patch::new(
2013 snapshot
2014 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2015 .collect(),
2016 );
2017 for entry in diagnostics {
2018 let start;
2019 let end;
2020 if entry.diagnostic.is_disk_based {
2021 // Some diagnostics are based on files on disk instead of buffers'
2022 // current contents. Adjust these diagnostics' ranges to reflect
2023 // any unsaved edits.
2024 start = edits_since_save.old_to_new(entry.range.start);
2025 end = edits_since_save.old_to_new(entry.range.end);
2026 } else {
2027 start = entry.range.start;
2028 end = entry.range.end;
2029 }
2030
2031 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2032 ..snapshot.clip_point_utf16(end, Bias::Right);
2033
2034 // Expand empty ranges by one character
2035 if range.start == range.end {
2036 range.end.column += 1;
2037 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2038 if range.start == range.end && range.end.column > 0 {
2039 range.start.column -= 1;
2040 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2041 }
2042 }
2043
2044 sanitized_diagnostics.push(DiagnosticEntry {
2045 range,
2046 diagnostic: entry.diagnostic,
2047 });
2048 }
2049 drop(edits_since_save);
2050
2051 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2052 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2053 Ok(())
2054 }
2055
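    /// Reloads the given buffers from disk, skipping buffers that aren't
    /// dirty. Buffers belonging to a remote project are reloaded by the host
    /// via `proto::ReloadBuffers`. When `push_to_history` is false, the
    /// resulting transactions are forgotten so they don't enter undo history.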
2056 pub fn reload_buffers(
2057 &self,
2058 buffers: HashSet<ModelHandle<Buffer>>,
2059 push_to_history: bool,
2060 cx: &mut ModelContext<Self>,
2061 ) -> Task<Result<ProjectTransaction>> {
2062 let mut local_buffers = Vec::new();
2063 let mut remote_buffers = None;
2064 for buffer_handle in buffers {
2065 let buffer = buffer_handle.read(cx);
2066 if buffer.is_dirty() {
2067 if let Some(file) = File::from_dyn(buffer.file()) {
2068 if file.is_local() {
2069 local_buffers.push(buffer_handle);
2070 } else {
2071 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2072 }
2073 }
2074 }
2075 }
2076
2077 let remote_buffers = self.remote_id().zip(remote_buffers);
2078 let client = self.client.clone();
2079
2080 cx.spawn(|this, mut cx| async move {
2081 let mut project_transaction = ProjectTransaction::default();
2082
2083 if let Some((project_id, remote_buffers)) = remote_buffers {
2084 let response = client
2085 .request(proto::ReloadBuffers {
2086 project_id,
2087 buffer_ids: remote_buffers
2088 .iter()
2089 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2090 .collect(),
2091 })
2092 .await?
2093 .transaction
2094 .ok_or_else(|| anyhow!("missing transaction"))?;
2095 project_transaction = this
2096 .update(&mut cx, |this, cx| {
2097 this.deserialize_project_transaction(response, push_to_history, cx)
2098 })
2099 .await?;
2100 }
2101
2102 for buffer in local_buffers {
2103 let transaction = buffer
2104 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2105 .await?;
2106 buffer.update(&mut cx, |buffer, cx| {
2107 if let Some(transaction) = transaction {
2108 if !push_to_history {
2109 buffer.forget_transaction(transaction.id);
2110 }
2111 project_transaction.0.insert(cx.handle(), transaction);
2112 }
2113 });
2114 }
2115
2116 Ok(project_transaction)
2117 })
2118 }
2119
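    /// Formats the given buffers. Local buffers are formatted with their
    /// language server's document (or range) formatting request; buffers from
    /// a remote project are formatted by the host via `proto::FormatBuffers`.
    /// Each buffer's edits are applied as a single transaction and collected
    /// into the returned `ProjectTransaction`.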
2120 pub fn format(
2121 &self,
2122 buffers: HashSet<ModelHandle<Buffer>>,
2123 push_to_history: bool,
2124 cx: &mut ModelContext<Project>,
2125 ) -> Task<Result<ProjectTransaction>> {
2126 let mut local_buffers = Vec::new();
2127 let mut remote_buffers = None;
2128 for buffer_handle in buffers {
2129 let buffer = buffer_handle.read(cx);
2130 if let Some(file) = File::from_dyn(buffer.file()) {
2131 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2132 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2133 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2134 }
2135 } else {
2136 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2137 }
2138 } else {
2139 return Task::ready(Ok(Default::default()));
2140 }
2141 }
2142
2143 let remote_buffers = self.remote_id().zip(remote_buffers);
2144 let client = self.client.clone();
2145
2146 cx.spawn(|this, mut cx| async move {
2147 let mut project_transaction = ProjectTransaction::default();
2148
2149 if let Some((project_id, remote_buffers)) = remote_buffers {
2150 let response = client
2151 .request(proto::FormatBuffers {
2152 project_id,
2153 buffer_ids: remote_buffers
2154 .iter()
2155 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2156 .collect(),
2157 })
2158 .await?
2159 .transaction
2160 .ok_or_else(|| anyhow!("missing transaction"))?;
2161 project_transaction = this
2162 .update(&mut cx, |this, cx| {
2163 this.deserialize_project_transaction(response, push_to_history, cx)
2164 })
2165 .await?;
2166 }
2167
2168 for (buffer, buffer_abs_path, language_server) in local_buffers {
2169 let text_document = lsp::TextDocumentIdentifier::new(
2170 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2171 );
2172 let capabilities = &language_server.capabilities();
2173 let tab_size = cx.update(|cx| {
2174 let language_name = buffer.read(cx).language().map(|language| language.name());
2175 cx.global::<Settings>().tab_size(language_name.as_deref())
2176 });
2177 let lsp_edits = if capabilities
2178 .document_formatting_provider
2179 .as_ref()
2180 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2181 {
2182 language_server
2183 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2184 text_document,
2185 options: lsp::FormattingOptions {
2186 tab_size,
2187 insert_spaces: true,
2188 insert_final_newline: Some(true),
2189 ..Default::default()
2190 },
2191 work_done_progress_params: Default::default(),
2192 })
2193 .await?
2194 } else if capabilities
2195 .document_range_formatting_provider
2196 .as_ref()
2197 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2198 {
2199 let buffer_start = lsp::Position::new(0, 0);
2200 let buffer_end =
2201 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2202 language_server
2203 .request::<lsp::request::RangeFormatting>(
2204 lsp::DocumentRangeFormattingParams {
2205 text_document,
2206 range: lsp::Range::new(buffer_start, buffer_end),
2207 options: lsp::FormattingOptions {
2208                                tab_size,
2209 insert_spaces: true,
2210 insert_final_newline: Some(true),
2211 ..Default::default()
2212 },
2213 work_done_progress_params: Default::default(),
2214 },
2215 )
2216 .await?
2217 } else {
2218 continue;
2219 };
2220
2221 if let Some(lsp_edits) = lsp_edits {
2222 let edits = this
2223 .update(&mut cx, |this, cx| {
2224 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2225 })
2226 .await?;
2227 buffer.update(&mut cx, |buffer, cx| {
2228 buffer.finalize_last_transaction();
2229 buffer.start_transaction();
2230 for (range, text) in edits {
2231 buffer.edit([range], text, cx);
2232 }
2233 if buffer.end_transaction(cx).is_some() {
2234 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2235 if !push_to_history {
2236 buffer.forget_transaction(transaction.id);
2237 }
2238 project_transaction.0.insert(cx.handle(), transaction);
2239 }
2240 });
2241 }
2242 }
2243
2244 Ok(project_transaction)
2245 })
2246 }
2247
2248 pub fn definition<T: ToPointUtf16>(
2249 &self,
2250 buffer: &ModelHandle<Buffer>,
2251 position: T,
2252 cx: &mut ModelContext<Self>,
2253 ) -> Task<Result<Vec<Location>>> {
2254 let position = position.to_point_utf16(buffer.read(cx));
2255 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2256 }
2257
2258 pub fn references<T: ToPointUtf16>(
2259 &self,
2260 buffer: &ModelHandle<Buffer>,
2261 position: T,
2262 cx: &mut ModelContext<Self>,
2263 ) -> Task<Result<Vec<Location>>> {
2264 let position = position.to_point_utf16(buffer.read(cx));
2265 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2266 }
2267
2268 pub fn document_highlights<T: ToPointUtf16>(
2269 &self,
2270 buffer: &ModelHandle<Buffer>,
2271 position: T,
2272 cx: &mut ModelContext<Self>,
2273 ) -> Task<Result<Vec<DocumentHighlight>>> {
2274 let position = position.to_point_utf16(buffer.read(cx));
2275
2276 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2277 }
2278
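    /// Searches for workspace symbols matching `query`. Locally, every running
    /// language server is queried and each result is resolved to a project
    /// path and display label; remotely, the request is forwarded to the host
    /// via `proto::GetProjectSymbols`.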
2279 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2280 if self.is_local() {
2281 let mut language_servers = HashMap::default();
2282 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2283 if let Some(worktree) = self
2284 .worktree_for_id(*worktree_id, cx)
2285 .and_then(|worktree| worktree.read(cx).as_local())
2286 {
2287 language_servers
2288 .entry(Arc::as_ptr(language_server))
2289 .or_insert((
2290 lsp_adapter.clone(),
2291 language_server.clone(),
2292 *worktree_id,
2293 worktree.abs_path().clone(),
2294 ));
2295 }
2296 }
2297
2298 let mut requests = Vec::new();
2299 for (_, language_server, _, _) in language_servers.values() {
2300 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2301 lsp::WorkspaceSymbolParams {
2302 query: query.to_string(),
2303 ..Default::default()
2304 },
2305 ));
2306 }
2307
2308 cx.spawn_weak(|this, cx| async move {
2309 let responses = futures::future::try_join_all(requests).await?;
2310
2311 let mut symbols = Vec::new();
2312 if let Some(this) = this.upgrade(&cx) {
2313 this.read_with(&cx, |this, cx| {
2314 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2315 language_servers.into_values().zip(responses)
2316 {
2317 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2318 |lsp_symbol| {
2319 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2320 let mut worktree_id = source_worktree_id;
2321 let path;
2322 if let Some((worktree, rel_path)) =
2323 this.find_local_worktree(&abs_path, cx)
2324 {
2325 worktree_id = worktree.read(cx).id();
2326 path = rel_path;
2327 } else {
2328 path = relativize_path(&worktree_abs_path, &abs_path);
2329 }
2330
2331 let label = this
2332 .languages
2333 .select_language(&path)
2334 .and_then(|language| {
2335 language
2336 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2337 })
2338 .unwrap_or_else(|| {
2339 CodeLabel::plain(lsp_symbol.name.clone(), None)
2340 });
2341 let signature = this.symbol_signature(worktree_id, &path);
2342
2343 Some(Symbol {
2344 source_worktree_id,
2345 worktree_id,
2346 language_server_name: adapter.name(),
2347 name: lsp_symbol.name,
2348 kind: lsp_symbol.kind,
2349 label,
2350 path,
2351 range: range_from_lsp(lsp_symbol.location.range),
2352 signature,
2353 })
2354 },
2355 ));
2356 }
2357 })
2358 }
2359
2360 Ok(symbols)
2361 })
2362 } else if let Some(project_id) = self.remote_id() {
2363 let request = self.client.request(proto::GetProjectSymbols {
2364 project_id,
2365 query: query.to_string(),
2366 });
2367 cx.spawn_weak(|this, cx| async move {
2368 let response = request.await?;
2369 let mut symbols = Vec::new();
2370 if let Some(this) = this.upgrade(&cx) {
2371 this.read_with(&cx, |this, _| {
2372 symbols.extend(
2373 response
2374 .symbols
2375 .into_iter()
2376 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2377 );
2378 })
2379 }
2380 Ok(symbols)
2381 })
2382 } else {
2383 Task::ready(Ok(Default::default()))
2384 }
2385 }
2386
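    /// Opens the buffer containing the given symbol, using the language server
    /// that originally reported it (or asking the remote host to do so).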
2387 pub fn open_buffer_for_symbol(
2388 &mut self,
2389 symbol: &Symbol,
2390 cx: &mut ModelContext<Self>,
2391 ) -> Task<Result<ModelHandle<Buffer>>> {
2392 if self.is_local() {
2393 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2394 symbol.source_worktree_id,
2395 symbol.language_server_name.clone(),
2396 )) {
2397 server.clone()
2398 } else {
2399 return Task::ready(Err(anyhow!(
2400 "language server for worktree and language not found"
2401 )));
2402 };
2403
2404 let worktree_abs_path = if let Some(worktree_abs_path) = self
2405 .worktree_for_id(symbol.worktree_id, cx)
2406 .and_then(|worktree| worktree.read(cx).as_local())
2407 .map(|local_worktree| local_worktree.abs_path())
2408 {
2409 worktree_abs_path
2410 } else {
2411 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2412 };
2413 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2414 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2415 uri
2416 } else {
2417 return Task::ready(Err(anyhow!("invalid symbol path")));
2418 };
2419
2420 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2421 } else if let Some(project_id) = self.remote_id() {
2422 let request = self.client.request(proto::OpenBufferForSymbol {
2423 project_id,
2424 symbol: Some(serialize_symbol(symbol)),
2425 });
2426 cx.spawn(|this, mut cx| async move {
2427 let response = request.await?;
2428 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2429 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2430 .await
2431 })
2432 } else {
2433 Task::ready(Err(anyhow!("project does not have a remote id")))
2434 }
2435 }
2436
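    /// Requests completions at `position`. Locally this issues a
    /// `textDocument/completion` request and converts each item into an
    /// anchored `Completion`, skipping items whose edit ranges fall outside
    /// the buffer; remotely it goes through `proto::GetCompletions`.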
2437 pub fn completions<T: ToPointUtf16>(
2438 &self,
2439 source_buffer_handle: &ModelHandle<Buffer>,
2440 position: T,
2441 cx: &mut ModelContext<Self>,
2442 ) -> Task<Result<Vec<Completion>>> {
2443 let source_buffer_handle = source_buffer_handle.clone();
2444 let source_buffer = source_buffer_handle.read(cx);
2445 let buffer_id = source_buffer.remote_id();
2446 let language = source_buffer.language().cloned();
2447 let worktree;
2448 let buffer_abs_path;
2449 if let Some(file) = File::from_dyn(source_buffer.file()) {
2450 worktree = file.worktree.clone();
2451 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2452 } else {
2453 return Task::ready(Ok(Default::default()));
2454 };
2455
2456 let position = position.to_point_utf16(source_buffer);
2457 let anchor = source_buffer.anchor_after(position);
2458
2459 if worktree.read(cx).as_local().is_some() {
2460 let buffer_abs_path = buffer_abs_path.unwrap();
2461 let (_, lang_server) =
2462 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2463 server.clone()
2464 } else {
2465 return Task::ready(Ok(Default::default()));
2466 };
2467
2468 cx.spawn(|_, cx| async move {
2469 let completions = lang_server
2470 .request::<lsp::request::Completion>(lsp::CompletionParams {
2471 text_document_position: lsp::TextDocumentPositionParams::new(
2472 lsp::TextDocumentIdentifier::new(
2473 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2474 ),
2475 point_to_lsp(position),
2476 ),
2477 context: Default::default(),
2478 work_done_progress_params: Default::default(),
2479 partial_result_params: Default::default(),
2480 })
2481 .await
2482 .context("lsp completion request failed")?;
2483
2484 let completions = if let Some(completions) = completions {
2485 match completions {
2486 lsp::CompletionResponse::Array(completions) => completions,
2487 lsp::CompletionResponse::List(list) => list.items,
2488 }
2489 } else {
2490 Default::default()
2491 };
2492
2493 source_buffer_handle.read_with(&cx, |this, _| {
2494 Ok(completions
2495 .into_iter()
2496 .filter_map(|lsp_completion| {
2497 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2498 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2499 (range_from_lsp(edit.range), edit.new_text.clone())
2500 }
2501 None => {
2502 let clipped_position =
2503 this.clip_point_utf16(position, Bias::Left);
2504 if position != clipped_position {
2505 log::info!("completion out of expected range");
2506 return None;
2507 }
2508 (
2509 this.common_prefix_at(
2510 clipped_position,
2511 &lsp_completion.label,
2512 ),
2513 lsp_completion.label.clone(),
2514 )
2515 }
2516 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2517 log::info!("unsupported insert/replace completion");
2518 return None;
2519 }
2520 };
2521
2522 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2523 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2524 if clipped_start == old_range.start && clipped_end == old_range.end {
2525 Some(Completion {
2526 old_range: this.anchor_before(old_range.start)
2527 ..this.anchor_after(old_range.end),
2528 new_text,
2529 label: language
2530 .as_ref()
2531 .and_then(|l| l.label_for_completion(&lsp_completion))
2532 .unwrap_or_else(|| {
2533 CodeLabel::plain(
2534 lsp_completion.label.clone(),
2535 lsp_completion.filter_text.as_deref(),
2536 )
2537 }),
2538 lsp_completion,
2539 })
2540 } else {
2541 log::info!("completion out of expected range");
2542 None
2543 }
2544 })
2545 .collect())
2546 })
2547 })
2548 } else if let Some(project_id) = self.remote_id() {
2549 let rpc = self.client.clone();
2550 let message = proto::GetCompletions {
2551 project_id,
2552 buffer_id,
2553 position: Some(language::proto::serialize_anchor(&anchor)),
2554 version: serialize_version(&source_buffer.version()),
2555 };
2556 cx.spawn_weak(|_, mut cx| async move {
2557 let response = rpc.request(message).await?;
2558
2559 source_buffer_handle
2560 .update(&mut cx, |buffer, _| {
2561 buffer.wait_for_version(deserialize_version(response.version))
2562 })
2563 .await;
2564
2565 response
2566 .completions
2567 .into_iter()
2568 .map(|completion| {
2569 language::proto::deserialize_completion(completion, language.as_ref())
2570 })
2571 .collect()
2572 })
2573 } else {
2574 Task::ready(Ok(Default::default()))
2575 }
2576 }
2577
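    /// Resolves the given completion item and applies any additional text
    /// edits it carries, returning the resulting transaction if one was made.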
2578 pub fn apply_additional_edits_for_completion(
2579 &self,
2580 buffer_handle: ModelHandle<Buffer>,
2581 completion: Completion,
2582 push_to_history: bool,
2583 cx: &mut ModelContext<Self>,
2584 ) -> Task<Result<Option<Transaction>>> {
2585 let buffer = buffer_handle.read(cx);
2586 let buffer_id = buffer.remote_id();
2587
2588 if self.is_local() {
2589 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2590 {
2591 server.clone()
2592 } else {
2593 return Task::ready(Ok(Default::default()));
2594 };
2595
2596 cx.spawn(|this, mut cx| async move {
2597 let resolved_completion = lang_server
2598 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2599 .await?;
2600 if let Some(edits) = resolved_completion.additional_text_edits {
2601 let edits = this
2602 .update(&mut cx, |this, cx| {
2603 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2604 })
2605 .await?;
2606 buffer_handle.update(&mut cx, |buffer, cx| {
2607 buffer.finalize_last_transaction();
2608 buffer.start_transaction();
2609 for (range, text) in edits {
2610 buffer.edit([range], text, cx);
2611 }
2612 let transaction = if buffer.end_transaction(cx).is_some() {
2613 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2614 if !push_to_history {
2615 buffer.forget_transaction(transaction.id);
2616 }
2617 Some(transaction)
2618 } else {
2619 None
2620 };
2621 Ok(transaction)
2622 })
2623 } else {
2624 Ok(None)
2625 }
2626 })
2627 } else if let Some(project_id) = self.remote_id() {
2628 let client = self.client.clone();
2629 cx.spawn(|_, mut cx| async move {
2630 let response = client
2631 .request(proto::ApplyCompletionAdditionalEdits {
2632 project_id,
2633 buffer_id,
2634 completion: Some(language::proto::serialize_completion(&completion)),
2635 })
2636 .await?;
2637
2638 if let Some(transaction) = response.transaction {
2639 let transaction = language::proto::deserialize_transaction(transaction)?;
2640 buffer_handle
2641 .update(&mut cx, |buffer, _| {
2642 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2643 })
2644 .await;
2645 if push_to_history {
2646 buffer_handle.update(&mut cx, |buffer, _| {
2647 buffer.push_transaction(transaction.clone(), Instant::now());
2648 });
2649 }
2650 Ok(Some(transaction))
2651 } else {
2652 Ok(None)
2653 }
2654 })
2655 } else {
2656 Task::ready(Err(anyhow!("project does not have a remote id")))
2657 }
2658 }
2659
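    /// Requests code actions for `range`, passing along the diagnostics that
    /// overlap it. Only entries that are actual code actions are returned;
    /// bare commands are filtered out.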
2660 pub fn code_actions<T: Clone + ToOffset>(
2661 &self,
2662 buffer_handle: &ModelHandle<Buffer>,
2663 range: Range<T>,
2664 cx: &mut ModelContext<Self>,
2665 ) -> Task<Result<Vec<CodeAction>>> {
2666 let buffer_handle = buffer_handle.clone();
2667 let buffer = buffer_handle.read(cx);
2668 let snapshot = buffer.snapshot();
2669 let relevant_diagnostics = snapshot
2670 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2671 .map(|entry| entry.to_lsp_diagnostic_stub())
2672 .collect();
2673 let buffer_id = buffer.remote_id();
2674 let worktree;
2675 let buffer_abs_path;
2676 if let Some(file) = File::from_dyn(buffer.file()) {
2677 worktree = file.worktree.clone();
2678 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2679 } else {
2680 return Task::ready(Ok(Default::default()));
2681 };
2682 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2683
2684 if worktree.read(cx).as_local().is_some() {
2685 let buffer_abs_path = buffer_abs_path.unwrap();
2686 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2687 {
2688 server.clone()
2689 } else {
2690 return Task::ready(Ok(Default::default()));
2691 };
2692
2693 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2694 cx.foreground().spawn(async move {
2695                if lang_server.capabilities().code_action_provider.is_none() {
2696 return Ok(Default::default());
2697 }
2698
2699 Ok(lang_server
2700 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2701 text_document: lsp::TextDocumentIdentifier::new(
2702 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2703 ),
2704 range: lsp_range,
2705 work_done_progress_params: Default::default(),
2706 partial_result_params: Default::default(),
2707 context: lsp::CodeActionContext {
2708 diagnostics: relevant_diagnostics,
2709 only: Some(vec![
2710 lsp::CodeActionKind::QUICKFIX,
2711 lsp::CodeActionKind::REFACTOR,
2712 lsp::CodeActionKind::REFACTOR_EXTRACT,
2713 lsp::CodeActionKind::SOURCE,
2714 ]),
2715 },
2716 })
2717 .await?
2718 .unwrap_or_default()
2719 .into_iter()
2720 .filter_map(|entry| {
2721 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2722 Some(CodeAction {
2723 range: range.clone(),
2724 lsp_action,
2725 })
2726 } else {
2727 None
2728 }
2729 })
2730 .collect())
2731 })
2732 } else if let Some(project_id) = self.remote_id() {
2733 let rpc = self.client.clone();
2734 let version = buffer.version();
2735 cx.spawn_weak(|_, mut cx| async move {
2736 let response = rpc
2737 .request(proto::GetCodeActions {
2738 project_id,
2739 buffer_id,
2740 start: Some(language::proto::serialize_anchor(&range.start)),
2741 end: Some(language::proto::serialize_anchor(&range.end)),
2742 version: serialize_version(&version),
2743 })
2744 .await?;
2745
2746 buffer_handle
2747 .update(&mut cx, |buffer, _| {
2748 buffer.wait_for_version(deserialize_version(response.version))
2749 })
2750 .await;
2751
2752 response
2753 .actions
2754 .into_iter()
2755 .map(language::proto::deserialize_code_action)
2756 .collect()
2757 })
2758 } else {
2759 Task::ready(Ok(Default::default()))
2760 }
2761 }
2762
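    /// Applies a code action: the action is first resolved (or re-requested
    /// when it has no resolve data), then its workspace edit is applied or its
    /// command executed, and any resulting buffer transactions are returned.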
2763 pub fn apply_code_action(
2764 &self,
2765 buffer_handle: ModelHandle<Buffer>,
2766 mut action: CodeAction,
2767 push_to_history: bool,
2768 cx: &mut ModelContext<Self>,
2769 ) -> Task<Result<ProjectTransaction>> {
2770 if self.is_local() {
2771 let buffer = buffer_handle.read(cx);
2772 let (lsp_adapter, lang_server) =
2773 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2774 server.clone()
2775 } else {
2776 return Task::ready(Ok(Default::default()));
2777 };
2778 let range = action.range.to_point_utf16(buffer);
2779
2780 cx.spawn(|this, mut cx| async move {
2781 if let Some(lsp_range) = action
2782 .lsp_action
2783 .data
2784 .as_mut()
2785 .and_then(|d| d.get_mut("codeActionParams"))
2786 .and_then(|d| d.get_mut("range"))
2787 {
2788 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2789 action.lsp_action = lang_server
2790 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2791 .await?;
2792 } else {
2793 let actions = this
2794 .update(&mut cx, |this, cx| {
2795 this.code_actions(&buffer_handle, action.range, cx)
2796 })
2797 .await?;
2798 action.lsp_action = actions
2799 .into_iter()
2800 .find(|a| a.lsp_action.title == action.lsp_action.title)
2801 .ok_or_else(|| anyhow!("code action is outdated"))?
2802 .lsp_action;
2803 }
2804
2805 if let Some(edit) = action.lsp_action.edit {
2806 Self::deserialize_workspace_edit(
2807 this,
2808 edit,
2809 push_to_history,
2810 lsp_adapter,
2811 lang_server,
2812 &mut cx,
2813 )
2814 .await
2815 } else if let Some(command) = action.lsp_action.command {
2816 this.update(&mut cx, |this, _| {
2817 this.last_workspace_edits_by_language_server
2818 .remove(&lang_server.server_id());
2819 });
2820 lang_server
2821 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2822 command: command.command,
2823 arguments: command.arguments.unwrap_or_default(),
2824 ..Default::default()
2825 })
2826 .await?;
2827 Ok(this.update(&mut cx, |this, _| {
2828 this.last_workspace_edits_by_language_server
2829 .remove(&lang_server.server_id())
2830 .unwrap_or_default()
2831 }))
2832 } else {
2833 Ok(ProjectTransaction::default())
2834 }
2835 })
2836 } else if let Some(project_id) = self.remote_id() {
2837 let client = self.client.clone();
2838 let request = proto::ApplyCodeAction {
2839 project_id,
2840 buffer_id: buffer_handle.read(cx).remote_id(),
2841 action: Some(language::proto::serialize_code_action(&action)),
2842 };
2843 cx.spawn(|this, mut cx| async move {
2844 let response = client
2845 .request(request)
2846 .await?
2847 .transaction
2848 .ok_or_else(|| anyhow!("missing transaction"))?;
2849 this.update(&mut cx, |this, cx| {
2850 this.deserialize_project_transaction(response, push_to_history, cx)
2851 })
2852 .await
2853 })
2854 } else {
2855 Task::ready(Err(anyhow!("project does not have a remote id")))
2856 }
2857 }
2858
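    /// Applies an LSP workspace edit: file creations, renames, and deletions
    /// go through the project's `Fs`, text edits are applied to the affected
    /// buffers, and the buffers' transactions are collected into a
    /// `ProjectTransaction`.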
2859 async fn deserialize_workspace_edit(
2860 this: ModelHandle<Self>,
2861 edit: lsp::WorkspaceEdit,
2862 push_to_history: bool,
2863 lsp_adapter: Arc<dyn LspAdapter>,
2864 language_server: Arc<LanguageServer>,
2865 cx: &mut AsyncAppContext,
2866 ) -> Result<ProjectTransaction> {
2867 let fs = this.read_with(cx, |this, _| this.fs.clone());
2868 let mut operations = Vec::new();
2869 if let Some(document_changes) = edit.document_changes {
2870 match document_changes {
2871 lsp::DocumentChanges::Edits(edits) => {
2872 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2873 }
2874 lsp::DocumentChanges::Operations(ops) => operations = ops,
2875 }
2876 } else if let Some(changes) = edit.changes {
2877 operations.extend(changes.into_iter().map(|(uri, edits)| {
2878 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2879 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2880 uri,
2881 version: None,
2882 },
2883 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2884 })
2885 }));
2886 }
2887
2888 let mut project_transaction = ProjectTransaction::default();
2889 for operation in operations {
2890 match operation {
2891 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2892 let abs_path = op
2893 .uri
2894 .to_file_path()
2895 .map_err(|_| anyhow!("can't convert URI to path"))?;
2896
2897 if let Some(parent_path) = abs_path.parent() {
2898 fs.create_dir(parent_path).await?;
2899 }
2900 if abs_path.ends_with("/") {
2901 fs.create_dir(&abs_path).await?;
2902 } else {
2903 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2904 .await?;
2905 }
2906 }
2907 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2908 let source_abs_path = op
2909 .old_uri
2910 .to_file_path()
2911 .map_err(|_| anyhow!("can't convert URI to path"))?;
2912 let target_abs_path = op
2913 .new_uri
2914 .to_file_path()
2915 .map_err(|_| anyhow!("can't convert URI to path"))?;
2916 fs.rename(
2917 &source_abs_path,
2918 &target_abs_path,
2919 op.options.map(Into::into).unwrap_or_default(),
2920 )
2921 .await?;
2922 }
2923 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2924 let abs_path = op
2925 .uri
2926 .to_file_path()
2927 .map_err(|_| anyhow!("can't convert URI to path"))?;
2928 let options = op.options.map(Into::into).unwrap_or_default();
2929 if abs_path.ends_with("/") {
2930 fs.remove_dir(&abs_path, options).await?;
2931 } else {
2932 fs.remove_file(&abs_path, options).await?;
2933 }
2934 }
2935 lsp::DocumentChangeOperation::Edit(op) => {
2936 let buffer_to_edit = this
2937 .update(cx, |this, cx| {
2938 this.open_local_buffer_via_lsp(
2939 op.text_document.uri,
2940 lsp_adapter.clone(),
2941 language_server.clone(),
2942 cx,
2943 )
2944 })
2945 .await?;
2946
2947 let edits = this
2948 .update(cx, |this, cx| {
2949 let edits = op.edits.into_iter().map(|edit| match edit {
2950 lsp::OneOf::Left(edit) => edit,
2951 lsp::OneOf::Right(edit) => edit.text_edit,
2952 });
2953 this.edits_from_lsp(
2954 &buffer_to_edit,
2955 edits,
2956 op.text_document.version,
2957 cx,
2958 )
2959 })
2960 .await?;
2961
2962 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2963 buffer.finalize_last_transaction();
2964 buffer.start_transaction();
2965 for (range, text) in edits {
2966 buffer.edit([range], text, cx);
2967 }
2968 let transaction = if buffer.end_transaction(cx).is_some() {
2969 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2970 if !push_to_history {
2971 buffer.forget_transaction(transaction.id);
2972 }
2973 Some(transaction)
2974 } else {
2975 None
2976 };
2977
2978 transaction
2979 });
2980 if let Some(transaction) = transaction {
2981 project_transaction.0.insert(buffer_to_edit, transaction);
2982 }
2983 }
2984 }
2985 }
2986
2987 Ok(project_transaction)
2988 }
2989
2990 pub fn prepare_rename<T: ToPointUtf16>(
2991 &self,
2992 buffer: ModelHandle<Buffer>,
2993 position: T,
2994 cx: &mut ModelContext<Self>,
2995 ) -> Task<Result<Option<Range<Anchor>>>> {
2996 let position = position.to_point_utf16(buffer.read(cx));
2997 self.request_lsp(buffer, PrepareRename { position }, cx)
2998 }
2999
3000 pub fn perform_rename<T: ToPointUtf16>(
3001 &self,
3002 buffer: ModelHandle<Buffer>,
3003 position: T,
3004 new_name: String,
3005 push_to_history: bool,
3006 cx: &mut ModelContext<Self>,
3007 ) -> Task<Result<ProjectTransaction>> {
3008 let position = position.to_point_utf16(buffer.read(cx));
3009 self.request_lsp(
3010 buffer,
3011 PerformRename {
3012 position,
3013 new_name,
3014 push_to_history,
3015 },
3016 cx,
3017 )
3018 }
3019
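    /// Searches all visible worktrees for `query`. Candidate files are scanned
    /// in parallel on background threads, matching files are opened as
    /// buffers, and each buffer is searched for match ranges; remote projects
    /// forward the query to the host.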
3020 pub fn search(
3021 &self,
3022 query: SearchQuery,
3023 cx: &mut ModelContext<Self>,
3024 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3025 if self.is_local() {
3026 let snapshots = self
3027 .visible_worktrees(cx)
3028 .filter_map(|tree| {
3029 let tree = tree.read(cx).as_local()?;
3030 Some(tree.snapshot())
3031 })
3032 .collect::<Vec<_>>();
3033
3034 let background = cx.background().clone();
3035 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3036 if path_count == 0 {
3037 return Task::ready(Ok(Default::default()));
3038 }
3039 let workers = background.num_cpus().min(path_count);
3040 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3041 cx.background()
3042 .spawn({
3043 let fs = self.fs.clone();
3044 let background = cx.background().clone();
3045 let query = query.clone();
3046 async move {
3047 let fs = &fs;
3048 let query = &query;
3049 let matching_paths_tx = &matching_paths_tx;
3050 let paths_per_worker = (path_count + workers - 1) / workers;
3051 let snapshots = &snapshots;
3052 background
3053 .scoped(|scope| {
3054 for worker_ix in 0..workers {
3055 let worker_start_ix = worker_ix * paths_per_worker;
3056 let worker_end_ix = worker_start_ix + paths_per_worker;
3057 scope.spawn(async move {
3058 let mut snapshot_start_ix = 0;
3059 let mut abs_path = PathBuf::new();
3060 for snapshot in snapshots {
3061 let snapshot_end_ix =
3062 snapshot_start_ix + snapshot.visible_file_count();
3063 if worker_end_ix <= snapshot_start_ix {
3064 break;
3065 } else if worker_start_ix > snapshot_end_ix {
3066 snapshot_start_ix = snapshot_end_ix;
3067 continue;
3068 } else {
3069 let start_in_snapshot = worker_start_ix
3070 .saturating_sub(snapshot_start_ix);
3071 let end_in_snapshot =
3072 cmp::min(worker_end_ix, snapshot_end_ix)
3073 - snapshot_start_ix;
3074
3075 for entry in snapshot
3076 .files(false, start_in_snapshot)
3077 .take(end_in_snapshot - start_in_snapshot)
3078 {
3079 if matching_paths_tx.is_closed() {
3080 break;
3081 }
3082
3083 abs_path.clear();
3084 abs_path.push(&snapshot.abs_path());
3085 abs_path.push(&entry.path);
3086 let matches = if let Some(file) =
3087 fs.open_sync(&abs_path).await.log_err()
3088 {
3089 query.detect(file).unwrap_or(false)
3090 } else {
3091 false
3092 };
3093
3094 if matches {
3095 let project_path =
3096 (snapshot.id(), entry.path.clone());
3097 if matching_paths_tx
3098 .send(project_path)
3099 .await
3100 .is_err()
3101 {
3102 break;
3103 }
3104 }
3105 }
3106
3107 snapshot_start_ix = snapshot_end_ix;
3108 }
3109 }
3110 });
3111 }
3112 })
3113 .await;
3114 }
3115 })
3116 .detach();
3117
3118 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3119 let open_buffers = self
3120 .opened_buffers
3121 .values()
3122 .filter_map(|b| b.upgrade(cx))
3123 .collect::<HashSet<_>>();
3124 cx.spawn(|this, cx| async move {
3125 for buffer in &open_buffers {
3126 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3127 buffers_tx.send((buffer.clone(), snapshot)).await?;
3128 }
3129
3130 let open_buffers = Rc::new(RefCell::new(open_buffers));
3131 while let Some(project_path) = matching_paths_rx.next().await {
3132 if buffers_tx.is_closed() {
3133 break;
3134 }
3135
3136 let this = this.clone();
3137 let open_buffers = open_buffers.clone();
3138 let buffers_tx = buffers_tx.clone();
3139 cx.spawn(|mut cx| async move {
3140 if let Some(buffer) = this
3141 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3142 .await
3143 .log_err()
3144 {
3145 if open_buffers.borrow_mut().insert(buffer.clone()) {
3146 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3147 buffers_tx.send((buffer, snapshot)).await?;
3148 }
3149 }
3150
3151 Ok::<_, anyhow::Error>(())
3152 })
3153 .detach();
3154 }
3155
3156 Ok::<_, anyhow::Error>(())
3157 })
3158 .detach_and_log_err(cx);
3159
3160 let background = cx.background().clone();
3161 cx.background().spawn(async move {
3162 let query = &query;
3163 let mut matched_buffers = Vec::new();
3164 for _ in 0..workers {
3165 matched_buffers.push(HashMap::default());
3166 }
3167 background
3168 .scoped(|scope| {
3169 for worker_matched_buffers in matched_buffers.iter_mut() {
3170 let mut buffers_rx = buffers_rx.clone();
3171 scope.spawn(async move {
3172 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3173 let buffer_matches = query
3174 .search(snapshot.as_rope())
3175 .await
3176 .iter()
3177 .map(|range| {
3178 snapshot.anchor_before(range.start)
3179 ..snapshot.anchor_after(range.end)
3180 })
3181 .collect::<Vec<_>>();
3182 if !buffer_matches.is_empty() {
3183 worker_matched_buffers
3184 .insert(buffer.clone(), buffer_matches);
3185 }
3186 }
3187 });
3188 }
3189 })
3190 .await;
3191 Ok(matched_buffers.into_iter().flatten().collect())
3192 })
3193 } else if let Some(project_id) = self.remote_id() {
3194 let request = self.client.request(query.to_proto(project_id));
3195 cx.spawn(|this, mut cx| async move {
3196 let response = request.await?;
3197 let mut result = HashMap::default();
3198 for location in response.locations {
3199 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3200 let target_buffer = this
3201 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3202 .await?;
3203 let start = location
3204 .start
3205 .and_then(deserialize_anchor)
3206 .ok_or_else(|| anyhow!("missing target start"))?;
3207 let end = location
3208 .end
3209 .and_then(deserialize_anchor)
3210 .ok_or_else(|| anyhow!("missing target end"))?;
3211 result
3212 .entry(target_buffer)
3213 .or_insert(Vec::new())
3214 .push(start..end)
3215 }
3216 Ok(result)
3217 })
3218 } else {
3219 Task::ready(Ok(Default::default()))
3220 }
3221 }
3222
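    /// Routes an `LspCommand` to the buffer's local language server, or to the
    /// remote host when the project is remote. Returns a default response if
    /// neither is available or the server lacks the required capability.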
3223 fn request_lsp<R: LspCommand>(
3224 &self,
3225 buffer_handle: ModelHandle<Buffer>,
3226 request: R,
3227 cx: &mut ModelContext<Self>,
3228 ) -> Task<Result<R::Response>>
3229 where
3230 <R::LspRequest as lsp::request::Request>::Result: Send,
3231 {
3232 let buffer = buffer_handle.read(cx);
3233 if self.is_local() {
3234 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3235 if let Some((file, (_, language_server))) =
3236 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3237 {
3238 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3239 return cx.spawn(|this, cx| async move {
3240 if !request.check_capabilities(&language_server.capabilities()) {
3241 return Ok(Default::default());
3242 }
3243
3244 let response = language_server
3245 .request::<R::LspRequest>(lsp_params)
3246 .await
3247 .context("lsp request failed")?;
3248 request
3249 .response_from_lsp(response, this, buffer_handle, cx)
3250 .await
3251 });
3252 }
3253 } else if let Some(project_id) = self.remote_id() {
3254 let rpc = self.client.clone();
3255 let message = request.to_proto(project_id, buffer);
3256 return cx.spawn(|this, cx| async move {
3257 let response = rpc.request(message).await?;
3258 request
3259 .response_from_proto(response, this, buffer_handle, cx)
3260 .await
3261 });
3262 }
3263 Task::ready(Ok(Default::default()))
3264 }
3265
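    /// Returns the local worktree containing `abs_path` together with the path
    /// relative to the worktree root, creating a new worktree if no existing
    /// one contains it.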
3266 pub fn find_or_create_local_worktree(
3267 &mut self,
3268 abs_path: impl AsRef<Path>,
3269 visible: bool,
3270 cx: &mut ModelContext<Self>,
3271 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3272 let abs_path = abs_path.as_ref();
3273 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3274 Task::ready(Ok((tree.clone(), relative_path.into())))
3275 } else {
3276 let worktree = self.create_local_worktree(abs_path, visible, cx);
3277 cx.foreground()
3278 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3279 }
3280 }
3281
3282 pub fn find_local_worktree(
3283 &self,
3284 abs_path: &Path,
3285 cx: &AppContext,
3286 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3287 for tree in self.worktrees(cx) {
3288 if let Some(relative_path) = tree
3289 .read(cx)
3290 .as_local()
3291 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3292 {
3293 return Some((tree.clone(), relative_path.into()));
3294 }
3295 }
3296 None
3297 }
3298
3299 pub fn is_shared(&self) -> bool {
3300 match &self.client_state {
3301 ProjectClientState::Local { is_shared, .. } => *is_shared,
3302 ProjectClientState::Remote { .. } => false,
3303 }
3304 }
3305
3306 fn create_local_worktree(
3307 &mut self,
3308 abs_path: impl AsRef<Path>,
3309 visible: bool,
3310 cx: &mut ModelContext<Self>,
3311 ) -> Task<Result<ModelHandle<Worktree>>> {
3312 let fs = self.fs.clone();
3313 let client = self.client.clone();
3314 let next_entry_id = self.next_entry_id.clone();
3315 let path: Arc<Path> = abs_path.as_ref().into();
3316 let task = self
3317 .loading_local_worktrees
3318 .entry(path.clone())
3319 .or_insert_with(|| {
3320 cx.spawn(|project, mut cx| {
3321 async move {
3322 let worktree = Worktree::local(
3323 client.clone(),
3324 path.clone(),
3325 visible,
3326 fs,
3327 next_entry_id,
3328 &mut cx,
3329 )
3330 .await;
3331 project.update(&mut cx, |project, _| {
3332 project.loading_local_worktrees.remove(&path);
3333 });
3334 let worktree = worktree?;
3335
3336 let (remote_project_id, is_shared) =
3337 project.update(&mut cx, |project, cx| {
3338 project.add_worktree(&worktree, cx);
3339 (project.remote_id(), project.is_shared())
3340 });
3341
3342 if let Some(project_id) = remote_project_id {
3343 if is_shared {
3344 worktree
3345 .update(&mut cx, |worktree, cx| {
3346 worktree.as_local_mut().unwrap().share(project_id, cx)
3347 })
3348 .await?;
3349 } else {
3350 worktree
3351 .update(&mut cx, |worktree, cx| {
3352 worktree.as_local_mut().unwrap().register(project_id, cx)
3353 })
3354 .await?;
3355 }
3356 }
3357
3358 Ok(worktree)
3359 }
3360                    .map_err(Arc::new)
3361 })
3362 .shared()
3363 })
3364 .clone();
3365 cx.foreground().spawn(async move {
3366 match task.await {
3367 Ok(worktree) => Ok(worktree),
3368 Err(err) => Err(anyhow!("{}", err)),
3369 }
3370 })
3371 }
3372
3373 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3374 self.worktrees.retain(|worktree| {
3375 worktree
3376 .upgrade(cx)
3377 .map_or(false, |w| w.read(cx).id() != id)
3378 });
3379 cx.notify();
3380 }
3381
3382 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3383 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3384 if worktree.read(cx).is_local() {
3385 cx.subscribe(&worktree, |this, worktree, _, cx| {
3386 this.update_local_worktree_buffers(worktree, cx);
3387 })
3388 .detach();
3389 }
3390
3391 let push_strong_handle = {
3392 let worktree = worktree.read(cx);
3393 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3394 };
3395 if push_strong_handle {
3396 self.worktrees
3397 .push(WorktreeHandle::Strong(worktree.clone()));
3398 } else {
3399 cx.observe_release(&worktree, |this, _, cx| {
3400 this.worktrees
3401 .retain(|worktree| worktree.upgrade(cx).is_some());
3402 cx.notify();
3403 })
3404 .detach();
3405 self.worktrees
3406 .push(WorktreeHandle::Weak(worktree.downgrade()));
3407 }
3408 cx.notify();
3409 }
3410
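    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's `File` is refreshed (and broadcast to remote peers), buffers
    /// whose handles have been dropped are removed, and renamed buffers are
    /// re-registered with their language servers.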
3411 fn update_local_worktree_buffers(
3412 &mut self,
3413 worktree_handle: ModelHandle<Worktree>,
3414 cx: &mut ModelContext<Self>,
3415 ) {
3416 let snapshot = worktree_handle.read(cx).snapshot();
3417 let mut buffers_to_delete = Vec::new();
3418 let mut renamed_buffers = Vec::new();
3419 for (buffer_id, buffer) in &self.opened_buffers {
3420 if let Some(buffer) = buffer.upgrade(cx) {
3421 buffer.update(cx, |buffer, cx| {
3422 if let Some(old_file) = File::from_dyn(buffer.file()) {
3423 if old_file.worktree != worktree_handle {
3424 return;
3425 }
3426
3427 let new_file = if let Some(entry) = old_file
3428 .entry_id
3429 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3430 {
3431 File {
3432 is_local: true,
3433 entry_id: Some(entry.id),
3434 mtime: entry.mtime,
3435 path: entry.path.clone(),
3436 worktree: worktree_handle.clone(),
3437 }
3438 } else if let Some(entry) =
3439 snapshot.entry_for_path(old_file.path().as_ref())
3440 {
3441 File {
3442 is_local: true,
3443 entry_id: Some(entry.id),
3444 mtime: entry.mtime,
3445 path: entry.path.clone(),
3446 worktree: worktree_handle.clone(),
3447 }
3448 } else {
3449 File {
3450 is_local: true,
3451 entry_id: None,
3452 path: old_file.path().clone(),
3453 mtime: old_file.mtime(),
3454 worktree: worktree_handle.clone(),
3455 }
3456 };
3457
3458 let old_path = old_file.abs_path(cx);
3459 if new_file.abs_path(cx) != old_path {
3460 renamed_buffers.push((cx.handle(), old_path));
3461 }
3462
3463 if let Some(project_id) = self.remote_id() {
3464 self.client
3465 .send(proto::UpdateBufferFile {
3466 project_id,
3467 buffer_id: *buffer_id as u64,
3468 file: Some(new_file.to_proto()),
3469 })
3470 .log_err();
3471 }
3472 buffer.file_updated(Box::new(new_file), cx).detach();
3473 }
3474 });
3475 } else {
3476 buffers_to_delete.push(*buffer_id);
3477 }
3478 }
3479
3480 for buffer_id in buffers_to_delete {
3481 self.opened_buffers.remove(&buffer_id);
3482 }
3483
3484 for (buffer, old_path) in renamed_buffers {
3485 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3486 self.assign_language_to_buffer(&buffer, cx);
3487 self.register_buffer_with_language_server(&buffer, cx);
3488 }
3489 }
3490
3491 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3492 let new_active_entry = entry.and_then(|project_path| {
3493 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3494 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3495 Some(entry.id)
3496 });
3497 if new_active_entry != self.active_entry {
3498 self.active_entry = new_active_entry;
3499 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3500 }
3501 }
3502
3503 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3504 self.language_server_statuses
3505 .values()
3506 .any(|status| status.pending_diagnostic_updates > 0)
3507 }
3508
3509 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3510 let mut summary = DiagnosticSummary::default();
3511 for (_, path_summary) in self.diagnostic_summaries(cx) {
3512 summary.error_count += path_summary.error_count;
3513 summary.warning_count += path_summary.warning_count;
3514 summary.info_count += path_summary.info_count;
3515 summary.hint_count += path_summary.hint_count;
3516 }
3517 summary
3518 }
3519
3520 pub fn diagnostic_summaries<'a>(
3521 &'a self,
3522 cx: &'a AppContext,
3523 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3524 self.worktrees(cx).flat_map(move |worktree| {
3525 let worktree = worktree.read(cx);
3526 let worktree_id = worktree.id();
3527 worktree
3528 .diagnostic_summaries()
3529 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3530 })
3531 }
3532
3533 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3534 if self
3535 .language_server_statuses
3536 .values()
3537 .map(|status| status.pending_diagnostic_updates)
3538 .sum::<isize>()
3539 == 1
3540 {
3541 cx.emit(Event::DiskBasedDiagnosticsStarted);
3542 }
3543 }
3544
3545 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3546 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3547 if self
3548 .language_server_statuses
3549 .values()
3550 .map(|status| status.pending_diagnostic_updates)
3551 .sum::<isize>()
3552 == 0
3553 {
3554 cx.emit(Event::DiskBasedDiagnosticsFinished);
3555 }
3556 }
3557
3558 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3559 self.active_entry
3560 }
3561
3562 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3563 self.worktree_for_id(path.worktree_id, cx)?
3564 .read(cx)
3565 .entry_for_path(&path.path)
3566 .map(|entry| entry.id)
3567 }
3568
3569 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3570 let worktree = self.worktree_for_entry(entry_id, cx)?;
3571 let worktree = worktree.read(cx);
3572 let worktree_id = worktree.id();
3573 let path = worktree.entry_for_id(entry_id)?.path.clone();
3574 Some(ProjectPath { worktree_id, path })
3575 }
3576
3577 // RPC message handlers
3578
3579 async fn handle_unshare_project(
3580 this: ModelHandle<Self>,
3581 _: TypedEnvelope<proto::UnshareProject>,
3582 _: Arc<Client>,
3583 mut cx: AsyncAppContext,
3584 ) -> Result<()> {
3585 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3586 Ok(())
3587 }
3588
3589 async fn handle_add_collaborator(
3590 this: ModelHandle<Self>,
3591 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3592 _: Arc<Client>,
3593 mut cx: AsyncAppContext,
3594 ) -> Result<()> {
3595 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3596 let collaborator = envelope
3597 .payload
3598 .collaborator
3599 .take()
3600 .ok_or_else(|| anyhow!("empty collaborator"))?;
3601
3602 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3603 this.update(&mut cx, |this, cx| {
3604 this.collaborators
3605 .insert(collaborator.peer_id, collaborator);
3606 cx.notify();
3607 });
3608
3609 Ok(())
3610 }
3611
3612 async fn handle_remove_collaborator(
3613 this: ModelHandle<Self>,
3614 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3615 _: Arc<Client>,
3616 mut cx: AsyncAppContext,
3617 ) -> Result<()> {
3618 this.update(&mut cx, |this, cx| {
3619 let peer_id = PeerId(envelope.payload.peer_id);
3620 let replica_id = this
3621 .collaborators
3622 .remove(&peer_id)
3623 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3624 .replica_id;
3625 for (_, buffer) in &this.opened_buffers {
3626 if let Some(buffer) = buffer.upgrade(cx) {
3627 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3628 }
3629 }
3630 cx.emit(Event::CollaboratorLeft(peer_id));
3631 cx.notify();
3632 Ok(())
3633 })
3634 }
3635
3636 async fn handle_register_worktree(
3637 this: ModelHandle<Self>,
3638 envelope: TypedEnvelope<proto::RegisterWorktree>,
3639 client: Arc<Client>,
3640 mut cx: AsyncAppContext,
3641 ) -> Result<()> {
3642 this.update(&mut cx, |this, cx| {
3643 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3644 let replica_id = this.replica_id();
3645 let worktree = proto::Worktree {
3646 id: envelope.payload.worktree_id,
3647 root_name: envelope.payload.root_name,
3648 entries: Default::default(),
3649 diagnostic_summaries: Default::default(),
3650 visible: envelope.payload.visible,
3651 };
3652 let (worktree, load_task) =
3653 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3654 this.add_worktree(&worktree, cx);
3655 load_task.detach();
3656 Ok(())
3657 })
3658 }
3659
3660 async fn handle_unregister_worktree(
3661 this: ModelHandle<Self>,
3662 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3663 _: Arc<Client>,
3664 mut cx: AsyncAppContext,
3665 ) -> Result<()> {
3666 this.update(&mut cx, |this, cx| {
3667 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3668 this.remove_worktree(worktree_id, cx);
3669 Ok(())
3670 })
3671 }
3672
3673 async fn handle_update_worktree(
3674 this: ModelHandle<Self>,
3675 envelope: TypedEnvelope<proto::UpdateWorktree>,
3676 _: Arc<Client>,
3677 mut cx: AsyncAppContext,
3678 ) -> Result<()> {
3679 this.update(&mut cx, |this, cx| {
3680 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3681 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3682 worktree.update(cx, |worktree, _| {
3683 let worktree = worktree.as_remote_mut().unwrap();
3684 worktree.update_from_remote(envelope)
3685 })?;
3686 }
3687 Ok(())
3688 })
3689 }
3690
3691 async fn handle_update_diagnostic_summary(
3692 this: ModelHandle<Self>,
3693 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3694 _: Arc<Client>,
3695 mut cx: AsyncAppContext,
3696 ) -> Result<()> {
3697 this.update(&mut cx, |this, cx| {
3698 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3699 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3700 if let Some(summary) = envelope.payload.summary {
3701 let project_path = ProjectPath {
3702 worktree_id,
3703 path: Path::new(&summary.path).into(),
3704 };
3705 worktree.update(cx, |worktree, _| {
3706 worktree
3707 .as_remote_mut()
3708 .unwrap()
3709 .update_diagnostic_summary(project_path.path.clone(), &summary);
3710 });
3711 cx.emit(Event::DiagnosticsUpdated(project_path));
3712 }
3713 }
3714 Ok(())
3715 })
3716 }
3717
3718 async fn handle_start_language_server(
3719 this: ModelHandle<Self>,
3720 envelope: TypedEnvelope<proto::StartLanguageServer>,
3721 _: Arc<Client>,
3722 mut cx: AsyncAppContext,
3723 ) -> Result<()> {
3724 let server = envelope
3725 .payload
3726 .server
3727 .ok_or_else(|| anyhow!("invalid server"))?;
3728 this.update(&mut cx, |this, cx| {
3729 this.language_server_statuses.insert(
3730 server.id as usize,
3731 LanguageServerStatus {
3732 name: server.name,
3733 pending_work: Default::default(),
3734 pending_diagnostic_updates: 0,
3735 },
3736 );
3737 cx.notify();
3738 });
3739 Ok(())
3740 }
3741
3742 async fn handle_update_language_server(
3743 this: ModelHandle<Self>,
3744 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3745 _: Arc<Client>,
3746 mut cx: AsyncAppContext,
3747 ) -> Result<()> {
3748 let language_server_id = envelope.payload.language_server_id as usize;
3749 match envelope
3750 .payload
3751 .variant
3752 .ok_or_else(|| anyhow!("invalid variant"))?
3753 {
3754 proto::update_language_server::Variant::WorkStart(payload) => {
3755 this.update(&mut cx, |this, cx| {
3756 this.on_lsp_work_start(language_server_id, payload.token, cx);
3757 })
3758 }
3759 proto::update_language_server::Variant::WorkProgress(payload) => {
3760 this.update(&mut cx, |this, cx| {
3761 this.on_lsp_work_progress(
3762 language_server_id,
3763 payload.token,
3764 LanguageServerProgress {
3765 message: payload.message,
3766 percentage: payload.percentage.map(|p| p as usize),
3767 last_update_at: Instant::now(),
3768 },
3769 cx,
3770 );
3771 })
3772 }
3773 proto::update_language_server::Variant::WorkEnd(payload) => {
3774 this.update(&mut cx, |this, cx| {
3775 this.on_lsp_work_end(language_server_id, payload.token, cx);
3776 })
3777 }
3778 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3779 this.update(&mut cx, |this, cx| {
3780 this.disk_based_diagnostics_started(cx);
3781 })
3782 }
3783 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3784 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3785 }
3786 }
3787
3788 Ok(())
3789 }
3790
3791 async fn handle_update_buffer(
3792 this: ModelHandle<Self>,
3793 envelope: TypedEnvelope<proto::UpdateBuffer>,
3794 _: Arc<Client>,
3795 mut cx: AsyncAppContext,
3796 ) -> Result<()> {
3797 this.update(&mut cx, |this, cx| {
3798 let payload = envelope.payload.clone();
3799 let buffer_id = payload.buffer_id;
3800 let ops = payload
3801 .operations
3802 .into_iter()
3803                .map(language::proto::deserialize_operation)
3804 .collect::<Result<Vec<_>, _>>()?;
3805 match this.opened_buffers.entry(buffer_id) {
3806 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3807 OpenBuffer::Strong(buffer) => {
3808 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3809 }
3810 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3811 OpenBuffer::Weak(_) => {}
3812 },
3813 hash_map::Entry::Vacant(e) => {
3814 e.insert(OpenBuffer::Loading(ops));
3815 }
3816 }
3817 Ok(())
3818 })
3819 }
3820
3821 async fn handle_update_buffer_file(
3822 this: ModelHandle<Self>,
3823 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3824 _: Arc<Client>,
3825 mut cx: AsyncAppContext,
3826 ) -> Result<()> {
3827 this.update(&mut cx, |this, cx| {
3828 let payload = envelope.payload.clone();
3829 let buffer_id = payload.buffer_id;
3830 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3831 let worktree = this
3832 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3833 .ok_or_else(|| anyhow!("no such worktree"))?;
3834 let file = File::from_proto(file, worktree.clone(), cx)?;
3835 let buffer = this
3836 .opened_buffers
3837 .get_mut(&buffer_id)
3838 .and_then(|b| b.upgrade(cx))
3839 .ok_or_else(|| anyhow!("no such buffer"))?;
3840 buffer.update(cx, |buffer, cx| {
3841 buffer.file_updated(Box::new(file), cx).detach();
3842 });
3843 Ok(())
3844 })
3845 }
3846
3847 async fn handle_save_buffer(
3848 this: ModelHandle<Self>,
3849 envelope: TypedEnvelope<proto::SaveBuffer>,
3850 _: Arc<Client>,
3851 mut cx: AsyncAppContext,
3852 ) -> Result<proto::BufferSaved> {
3853 let buffer_id = envelope.payload.buffer_id;
3854 let requested_version = deserialize_version(envelope.payload.version);
3855
3856 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3857 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3858 let buffer = this
3859 .opened_buffers
3860 .get(&buffer_id)
3861 .and_then(|buffer| buffer.upgrade(cx))
3862 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3863 Ok::<_, anyhow::Error>((project_id, buffer))
3864 })?;
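        // Wait until this replica has caught up to the version the peer requested before saving.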
3865 buffer
3866 .update(&mut cx, |buffer, _| {
3867 buffer.wait_for_version(requested_version)
3868 })
3869 .await;
3870
3871 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3872 Ok(proto::BufferSaved {
3873 project_id,
3874 buffer_id,
3875 version: serialize_version(&saved_version),
3876 mtime: Some(mtime.into()),
3877 })
3878 }
3879
3880 async fn handle_reload_buffers(
3881 this: ModelHandle<Self>,
3882 envelope: TypedEnvelope<proto::ReloadBuffers>,
3883 _: Arc<Client>,
3884 mut cx: AsyncAppContext,
3885 ) -> Result<proto::ReloadBuffersResponse> {
3886 let sender_id = envelope.original_sender_id()?;
3887 let reload = this.update(&mut cx, |this, cx| {
3888 let mut buffers = HashSet::default();
3889 for buffer_id in &envelope.payload.buffer_ids {
3890 buffers.insert(
3891 this.opened_buffers
3892 .get(buffer_id)
3893 .and_then(|buffer| buffer.upgrade(cx))
3894 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3895 );
3896 }
3897 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3898 })?;
3899
3900 let project_transaction = reload.await?;
3901 let project_transaction = this.update(&mut cx, |this, cx| {
3902 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3903 });
3904 Ok(proto::ReloadBuffersResponse {
3905 transaction: Some(project_transaction),
3906 })
3907 }
3908
3909 async fn handle_format_buffers(
3910 this: ModelHandle<Self>,
3911 envelope: TypedEnvelope<proto::FormatBuffers>,
3912 _: Arc<Client>,
3913 mut cx: AsyncAppContext,
3914 ) -> Result<proto::FormatBuffersResponse> {
3915 let sender_id = envelope.original_sender_id()?;
3916 let format = this.update(&mut cx, |this, cx| {
3917 let mut buffers = HashSet::default();
3918 for buffer_id in &envelope.payload.buffer_ids {
3919 buffers.insert(
3920 this.opened_buffers
3921 .get(buffer_id)
3922 .and_then(|buffer| buffer.upgrade(cx))
3923 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3924 );
3925 }
3926 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3927 })?;
3928
3929 let project_transaction = format.await?;
3930 let project_transaction = this.update(&mut cx, |this, cx| {
3931 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3932 });
3933 Ok(proto::FormatBuffersResponse {
3934 transaction: Some(project_transaction),
3935 })
3936 }
3937
3938 async fn handle_get_completions(
3939 this: ModelHandle<Self>,
3940 envelope: TypedEnvelope<proto::GetCompletions>,
3941 _: Arc<Client>,
3942 mut cx: AsyncAppContext,
3943 ) -> Result<proto::GetCompletionsResponse> {
3944 let position = envelope
3945 .payload
3946 .position
3947 .and_then(language::proto::deserialize_anchor)
3948 .ok_or_else(|| anyhow!("invalid position"))?;
3949 let version = deserialize_version(envelope.payload.version);
3950 let buffer = this.read_with(&cx, |this, cx| {
3951 this.opened_buffers
3952 .get(&envelope.payload.buffer_id)
3953 .and_then(|buffer| buffer.upgrade(cx))
3954 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3955 })?;
3956 buffer
3957 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3958 .await;
3959 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3960 let completions = this
3961 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3962 .await?;
3963
3964 Ok(proto::GetCompletionsResponse {
3965 completions: completions
3966 .iter()
3967 .map(language::proto::serialize_completion)
3968 .collect(),
3969 version: serialize_version(&version),
3970 })
3971 }
3972
3973 async fn handle_apply_additional_edits_for_completion(
3974 this: ModelHandle<Self>,
3975 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3976 _: Arc<Client>,
3977 mut cx: AsyncAppContext,
3978 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3979 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3980 let buffer = this
3981 .opened_buffers
3982 .get(&envelope.payload.buffer_id)
3983 .and_then(|buffer| buffer.upgrade(cx))
3984 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3985 let language = buffer.read(cx).language();
3986 let completion = language::proto::deserialize_completion(
3987 envelope
3988 .payload
3989 .completion
3990 .ok_or_else(|| anyhow!("invalid completion"))?,
3991 language,
3992 )?;
3993 Ok::<_, anyhow::Error>(
3994 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3995 )
3996 })?;
3997
3998 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3999 transaction: apply_additional_edits
4000 .await?
4001 .as_ref()
4002 .map(language::proto::serialize_transaction),
4003 })
4004 }
4005
4006 async fn handle_get_code_actions(
4007 this: ModelHandle<Self>,
4008 envelope: TypedEnvelope<proto::GetCodeActions>,
4009 _: Arc<Client>,
4010 mut cx: AsyncAppContext,
4011 ) -> Result<proto::GetCodeActionsResponse> {
4012 let start = envelope
4013 .payload
4014 .start
4015 .and_then(language::proto::deserialize_anchor)
4016 .ok_or_else(|| anyhow!("invalid start"))?;
4017 let end = envelope
4018 .payload
4019 .end
4020 .and_then(language::proto::deserialize_anchor)
4021 .ok_or_else(|| anyhow!("invalid end"))?;
4022 let buffer = this.update(&mut cx, |this, cx| {
4023 this.opened_buffers
4024 .get(&envelope.payload.buffer_id)
4025 .and_then(|buffer| buffer.upgrade(cx))
4026 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4027 })?;
4028 buffer
4029 .update(&mut cx, |buffer, _| {
4030 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4031 })
4032 .await;
4033
4034 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4035 let code_actions = this.update(&mut cx, |this, cx| {
4036 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4037 })?;
4038
4039 Ok(proto::GetCodeActionsResponse {
4040 actions: code_actions
4041 .await?
4042 .iter()
4043 .map(language::proto::serialize_code_action)
4044 .collect(),
4045 version: serialize_version(&version),
4046 })
4047 }
4048
4049 async fn handle_apply_code_action(
4050 this: ModelHandle<Self>,
4051 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4052 _: Arc<Client>,
4053 mut cx: AsyncAppContext,
4054 ) -> Result<proto::ApplyCodeActionResponse> {
4055 let sender_id = envelope.original_sender_id()?;
4056 let action = language::proto::deserialize_code_action(
4057 envelope
4058 .payload
4059 .action
4060 .ok_or_else(|| anyhow!("invalid action"))?,
4061 )?;
4062 let apply_code_action = this.update(&mut cx, |this, cx| {
4063 let buffer = this
4064 .opened_buffers
4065 .get(&envelope.payload.buffer_id)
4066 .and_then(|buffer| buffer.upgrade(cx))
4067 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4068 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4069 })?;
4070
4071 let project_transaction = apply_code_action.await?;
4072 let project_transaction = this.update(&mut cx, |this, cx| {
4073 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4074 });
4075 Ok(proto::ApplyCodeActionResponse {
4076 transaction: Some(project_transaction),
4077 })
4078 }
4079
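    // Generic handler for buffer-scoped LSP requests forwarded by remote collaborators:
    // the request is deserialized against the target buffer, executed locally via
    // `request_lsp`, and the response is converted back to protobuf for the original sender.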
4080 async fn handle_lsp_command<T: LspCommand>(
4081 this: ModelHandle<Self>,
4082 envelope: TypedEnvelope<T::ProtoRequest>,
4083 _: Arc<Client>,
4084 mut cx: AsyncAppContext,
4085 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4086 where
4087 <T::LspRequest as lsp::request::Request>::Result: Send,
4088 {
4089 let sender_id = envelope.original_sender_id()?;
4090 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4091 let buffer_handle = this.read_with(&cx, |this, _| {
4092 this.opened_buffers
4093 .get(&buffer_id)
4094 .and_then(|buffer| buffer.upgrade(&cx))
4095 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4096 })?;
4097 let request = T::from_proto(
4098 envelope.payload,
4099 this.clone(),
4100 buffer_handle.clone(),
4101 cx.clone(),
4102 )
4103 .await?;
4104 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4105 let response = this
4106 .update(&mut cx, |this, cx| {
4107 this.request_lsp(buffer_handle, request, cx)
4108 })
4109 .await?;
4110 this.update(&mut cx, |this, cx| {
4111 Ok(T::response_to_proto(
4112 response,
4113 this,
4114 sender_id,
4115 &buffer_version,
4116 cx,
4117 ))
4118 })
4119 }
4120
4121 async fn handle_get_project_symbols(
4122 this: ModelHandle<Self>,
4123 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4124 _: Arc<Client>,
4125 mut cx: AsyncAppContext,
4126 ) -> Result<proto::GetProjectSymbolsResponse> {
4127 let symbols = this
4128 .update(&mut cx, |this, cx| {
4129 this.symbols(&envelope.payload.query, cx)
4130 })
4131 .await?;
4132
4133 Ok(proto::GetProjectSymbolsResponse {
4134 symbols: symbols.iter().map(serialize_symbol).collect(),
4135 })
4136 }
4137
4138 async fn handle_search_project(
4139 this: ModelHandle<Self>,
4140 envelope: TypedEnvelope<proto::SearchProject>,
4141 _: Arc<Client>,
4142 mut cx: AsyncAppContext,
4143 ) -> Result<proto::SearchProjectResponse> {
4144 let peer_id = envelope.original_sender_id()?;
4145 let query = SearchQuery::from_proto(envelope.payload)?;
4146 let result = this
4147 .update(&mut cx, |this, cx| this.search(query, cx))
4148 .await?;
4149
4150 this.update(&mut cx, |this, cx| {
4151 let mut locations = Vec::new();
4152 for (buffer, ranges) in result {
4153 for range in ranges {
4154 let start = serialize_anchor(&range.start);
4155 let end = serialize_anchor(&range.end);
4156 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4157 locations.push(proto::Location {
4158 buffer: Some(buffer),
4159 start: Some(start),
4160 end: Some(end),
4161 });
4162 }
4163 }
4164 Ok(proto::SearchProjectResponse { locations })
4165 })
4166 }
4167
4168 async fn handle_open_buffer_for_symbol(
4169 this: ModelHandle<Self>,
4170 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4171 _: Arc<Client>,
4172 mut cx: AsyncAppContext,
4173 ) -> Result<proto::OpenBufferForSymbolResponse> {
4174 let peer_id = envelope.original_sender_id()?;
4175 let symbol = envelope
4176 .payload
4177 .symbol
4178 .ok_or_else(|| anyhow!("invalid symbol"))?;
4179 let symbol = this.read_with(&cx, |this, _| {
4180 let symbol = this.deserialize_symbol(symbol)?;
4181 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4182 if signature == symbol.signature {
4183 Ok(symbol)
4184 } else {
4185 Err(anyhow!("invalid symbol signature"))
4186 }
4187 })?;
4188 let buffer = this
4189 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4190 .await?;
4191
4192 Ok(proto::OpenBufferForSymbolResponse {
4193 buffer: Some(this.update(&mut cx, |this, cx| {
4194 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4195 })),
4196 })
4197 }
4198
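    // Derives a signature from the symbol's worktree, path, and this project's private
    // nonce. `handle_open_buffer_for_symbol` verifies it, so peers can only request
    // buffers for symbols this project actually sent them.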
4199 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4200 let mut hasher = Sha256::new();
4201 hasher.update(worktree_id.to_proto().to_be_bytes());
4202 hasher.update(path.to_string_lossy().as_bytes());
4203 hasher.update(self.nonce.to_be_bytes());
4204 hasher.finalize().as_slice().try_into().unwrap()
4205 }
4206
4207 async fn handle_open_buffer_by_id(
4208 this: ModelHandle<Self>,
4209 envelope: TypedEnvelope<proto::OpenBufferById>,
4210 _: Arc<Client>,
4211 mut cx: AsyncAppContext,
4212 ) -> Result<proto::OpenBufferResponse> {
4213 let peer_id = envelope.original_sender_id()?;
4214 let buffer = this
4215 .update(&mut cx, |this, cx| {
4216 this.open_buffer_by_id(envelope.payload.id, cx)
4217 })
4218 .await?;
4219 this.update(&mut cx, |this, cx| {
4220 Ok(proto::OpenBufferResponse {
4221 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4222 })
4223 })
4224 }
4225
4226 async fn handle_open_buffer_by_path(
4227 this: ModelHandle<Self>,
4228 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4229 _: Arc<Client>,
4230 mut cx: AsyncAppContext,
4231 ) -> Result<proto::OpenBufferResponse> {
4232 let peer_id = envelope.original_sender_id()?;
4233 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4234 let open_buffer = this.update(&mut cx, |this, cx| {
4235 this.open_buffer(
4236 ProjectPath {
4237 worktree_id,
4238 path: PathBuf::from(envelope.payload.path).into(),
4239 },
4240 cx,
4241 )
4242 });
4243
4244 let buffer = open_buffer.await?;
4245 this.update(&mut cx, |this, cx| {
4246 Ok(proto::OpenBufferResponse {
4247 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4248 })
4249 })
4250 }
4251
4252 fn serialize_project_transaction_for_peer(
4253 &mut self,
4254 project_transaction: ProjectTransaction,
4255 peer_id: PeerId,
4256 cx: &AppContext,
4257 ) -> proto::ProjectTransaction {
4258 let mut serialized_transaction = proto::ProjectTransaction {
4259 buffers: Default::default(),
4260 transactions: Default::default(),
4261 };
4262 for (buffer, transaction) in project_transaction.0 {
4263 serialized_transaction
4264 .buffers
4265 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4266 serialized_transaction
4267 .transactions
4268 .push(language::proto::serialize_transaction(&transaction));
4269 }
4270 serialized_transaction
4271 }
4272
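    // Reconstructs a `ProjectTransaction` received from a peer: each buffer is
    // deserialized (waiting for it to be opened locally if needed), the transaction's
    // edits are awaited, and the transaction is optionally pushed onto the undo history.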
4273 fn deserialize_project_transaction(
4274 &mut self,
4275 message: proto::ProjectTransaction,
4276 push_to_history: bool,
4277 cx: &mut ModelContext<Self>,
4278 ) -> Task<Result<ProjectTransaction>> {
4279 cx.spawn(|this, mut cx| async move {
4280 let mut project_transaction = ProjectTransaction::default();
4281 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4282 let buffer = this
4283 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4284 .await?;
4285 let transaction = language::proto::deserialize_transaction(transaction)?;
4286 project_transaction.0.insert(buffer, transaction);
4287 }
4288
4289 for (buffer, transaction) in &project_transaction.0 {
4290 buffer
4291 .update(&mut cx, |buffer, _| {
4292 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4293 })
4294 .await;
4295
4296 if push_to_history {
4297 buffer.update(&mut cx, |buffer, _| {
4298 buffer.push_transaction(transaction.clone(), Instant::now());
4299 });
4300 }
4301 }
4302
4303 Ok(project_transaction)
4304 })
4305 }
4306
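    // The first time a buffer is sent to a given peer, its full state is serialized;
    // on subsequent sends only the buffer's remote id is transmitted.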
4307 fn serialize_buffer_for_peer(
4308 &mut self,
4309 buffer: &ModelHandle<Buffer>,
4310 peer_id: PeerId,
4311 cx: &AppContext,
4312 ) -> proto::Buffer {
4313 let buffer_id = buffer.read(cx).remote_id();
4314 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4315 if shared_buffers.insert(buffer_id) {
4316 proto::Buffer {
4317 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4318 }
4319 } else {
4320 proto::Buffer {
4321 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4322 }
4323 }
4324 }
4325
4326 fn deserialize_buffer(
4327 &mut self,
4328 buffer: proto::Buffer,
4329 cx: &mut ModelContext<Self>,
4330 ) -> Task<Result<ModelHandle<Buffer>>> {
4331 let replica_id = self.replica_id();
4332
4333 let opened_buffer_tx = self.opened_buffer.0.clone();
4334 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4335 cx.spawn(|this, mut cx| async move {
4336 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4337 proto::buffer::Variant::Id(id) => {
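                    // Only an id was sent, so the buffer is expected to already be open
                    // (or to open shortly) on this side; wait for it to appear in
                    // `opened_buffers`.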
4338 let buffer = loop {
4339 let buffer = this.read_with(&cx, |this, cx| {
4340 this.opened_buffers
4341 .get(&id)
4342 .and_then(|buffer| buffer.upgrade(cx))
4343 });
4344 if let Some(buffer) = buffer {
4345 break buffer;
4346 }
4347 opened_buffer_rx
4348 .next()
4349 .await
4350 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4351 };
4352 Ok(buffer)
4353 }
4354 proto::buffer::Variant::State(mut buffer) => {
4355 let mut buffer_worktree = None;
4356 let mut buffer_file = None;
4357 if let Some(file) = buffer.file.take() {
4358 this.read_with(&cx, |this, cx| {
4359 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4360 let worktree =
4361 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4362 anyhow!("no worktree found for id {}", file.worktree_id)
4363 })?;
4364 buffer_file =
4365 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4366 as Box<dyn language::File>);
4367 buffer_worktree = Some(worktree);
4368 Ok::<_, anyhow::Error>(())
4369 })?;
4370 }
4371
4372 let buffer = cx.add_model(|cx| {
4373 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4374 });
4375
4376 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4377
4378 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4379 Ok(buffer)
4380 }
4381 }
4382 })
4383 }
4384
4385 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4386 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4387 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4388 let start = serialized_symbol
4389 .start
4390 .ok_or_else(|| anyhow!("invalid start"))?;
4391 let end = serialized_symbol
4392 .end
4393 .ok_or_else(|| anyhow!("invalid end"))?;
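        // The serialized kind is assumed to be a valid `lsp::SymbolKind` value, as
        // produced by `serialize_symbol` below.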
4394 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4395 let path = PathBuf::from(serialized_symbol.path);
4396 let language = self.languages.select_language(&path);
4397 Ok(Symbol {
4398 source_worktree_id,
4399 worktree_id,
4400 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4401 label: language
4402 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4403 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4404 name: serialized_symbol.name,
4405 path,
4406 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4407 kind,
4408 signature: serialized_symbol
4409 .signature
4410 .try_into()
4411 .map_err(|_| anyhow!("invalid signature"))?,
4412 })
4413 }
4414
4415 async fn handle_buffer_saved(
4416 this: ModelHandle<Self>,
4417 envelope: TypedEnvelope<proto::BufferSaved>,
4418 _: Arc<Client>,
4419 mut cx: AsyncAppContext,
4420 ) -> Result<()> {
4421 let version = deserialize_version(envelope.payload.version);
4422 let mtime = envelope
4423 .payload
4424 .mtime
4425 .ok_or_else(|| anyhow!("missing mtime"))?
4426 .into();
4427
4428 this.update(&mut cx, |this, cx| {
4429 let buffer = this
4430 .opened_buffers
4431 .get(&envelope.payload.buffer_id)
4432 .and_then(|buffer| buffer.upgrade(cx));
4433 if let Some(buffer) = buffer {
4434 buffer.update(cx, |buffer, cx| {
4435 buffer.did_save(version, mtime, None, cx);
4436 });
4437 }
4438 Ok(())
4439 })
4440 }
4441
4442 async fn handle_buffer_reloaded(
4443 this: ModelHandle<Self>,
4444 envelope: TypedEnvelope<proto::BufferReloaded>,
4445 _: Arc<Client>,
4446 mut cx: AsyncAppContext,
4447 ) -> Result<()> {
4448 let payload = envelope.payload.clone();
4449 let version = deserialize_version(payload.version);
4450 let mtime = payload
4451 .mtime
4452 .ok_or_else(|| anyhow!("missing mtime"))?
4453 .into();
4454 this.update(&mut cx, |this, cx| {
4455 let buffer = this
4456 .opened_buffers
4457 .get(&payload.buffer_id)
4458 .and_then(|buffer| buffer.upgrade(cx));
4459 if let Some(buffer) = buffer {
4460 buffer.update(cx, |buffer, cx| {
4461 buffer.did_reload(version, mtime, cx);
4462 });
4463 }
4464 Ok(())
4465 })
4466 }
4467
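    // Fuzzy-matches `query` against the file paths of all visible worktrees, running the
    // actual matching on the background executor.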
4468 pub fn match_paths<'a>(
4469 &self,
4470 query: &'a str,
4471 include_ignored: bool,
4472 smart_case: bool,
4473 max_results: usize,
4474 cancel_flag: &'a AtomicBool,
4475 cx: &AppContext,
4476 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4477 let worktrees = self
4478 .worktrees(cx)
4479 .filter(|worktree| worktree.read(cx).is_visible())
4480 .collect::<Vec<_>>();
4481 let include_root_name = worktrees.len() > 1;
4482 let candidate_sets = worktrees
4483 .into_iter()
4484 .map(|worktree| CandidateSet {
4485 snapshot: worktree.read(cx).snapshot(),
4486 include_ignored,
4487 include_root_name,
4488 })
4489 .collect::<Vec<_>>();
4490
4491 let background = cx.background().clone();
4492 async move {
4493 fuzzy::match_paths(
4494 candidate_sets.as_slice(),
4495 query,
4496 smart_case,
4497 max_results,
4498 cancel_flag,
4499 background,
4500 )
4501 .await
4502 }
4503 }
4504
4505 fn edits_from_lsp(
4506 &mut self,
4507 buffer: &ModelHandle<Buffer>,
4508 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4509 version: Option<i32>,
4510 cx: &mut ModelContext<Self>,
4511 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4512 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4513 cx.background().spawn(async move {
4514 let snapshot = snapshot?;
4515 let mut lsp_edits = lsp_edits
4516 .into_iter()
4517 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4518 .peekable();
4519
4520 let mut edits = Vec::new();
4521 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4522 // Combine any LSP edits that are adjacent.
4523 //
4524 // Also, combine LSP edits that are separated from each other by only
4525 // a newline. This is important because for some code actions,
4526 // Rust-analyzer rewrites the entire buffer via a series of edits that
4527 // are separated by unchanged newline characters.
4528 //
4529 // In order for the diffing logic below to work properly, any edits that
4530 // cancel each other out must be combined into one.
4531 while let Some((next_range, next_text)) = lsp_edits.peek() {
4532 if next_range.start > range.end {
4533 if next_range.start.row > range.end.row + 1
4534 || next_range.start.column > 0
4535 || snapshot.clip_point_utf16(
4536 PointUtf16::new(range.end.row, u32::MAX),
4537 Bias::Left,
4538 ) > range.end
4539 {
4540 break;
4541 }
4542 new_text.push('\n');
4543 }
4544 range.end = next_range.end;
4545 new_text.push_str(&next_text);
4546 lsp_edits.next();
4547 }
4548
4549 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4550 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4551 {
4552 return Err(anyhow!("invalid edits received from language server"));
4553 }
4554
4555 // For multiline edits, perform a diff of the old and new text so that
4556 // we can identify the changes more precisely, preserving the locations
4557 // of any anchors positioned in the unchanged regions.
4558 if range.end.row > range.start.row {
4559 let mut offset = range.start.to_offset(&snapshot);
4560 let old_text = snapshot.text_for_range(range).collect::<String>();
4561
4562 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4563 let mut moved_since_edit = true;
4564 for change in diff.iter_all_changes() {
4565 let tag = change.tag();
4566 let value = change.value();
4567 match tag {
4568 ChangeTag::Equal => {
4569 offset += value.len();
4570 moved_since_edit = true;
4571 }
4572 ChangeTag::Delete => {
4573 let start = snapshot.anchor_after(offset);
4574 let end = snapshot.anchor_before(offset + value.len());
4575 if moved_since_edit {
4576 edits.push((start..end, String::new()));
4577 } else {
4578 edits.last_mut().unwrap().0.end = end;
4579 }
4580 offset += value.len();
4581 moved_since_edit = false;
4582 }
4583 ChangeTag::Insert => {
4584 if moved_since_edit {
4585 let anchor = snapshot.anchor_after(offset);
4586 edits.push((anchor.clone()..anchor, value.to_string()));
4587 } else {
4588 edits.last_mut().unwrap().1.push_str(value);
4589 }
4590 moved_since_edit = false;
4591 }
4592 }
4593 }
4594 } else if range.end == range.start {
4595 let anchor = snapshot.anchor_after(range.start);
4596 edits.push((anchor.clone()..anchor, new_text));
4597 } else {
4598 let edit_start = snapshot.anchor_after(range.start);
4599 let edit_end = snapshot.anchor_before(range.end);
4600 edits.push((edit_start..edit_end, new_text));
4601 }
4602 }
4603
4604 Ok(edits)
4605 })
4606 }
4607
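    // Returns the text snapshot corresponding to the document version a language server
    // replied with, so its positions can be resolved against the right content. Snapshots
    // more than `OLD_VERSIONS_TO_RETAIN` versions older than the requested one are pruned.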
4608 fn buffer_snapshot_for_lsp_version(
4609 &mut self,
4610 buffer: &ModelHandle<Buffer>,
4611 version: Option<i32>,
4612 cx: &AppContext,
4613 ) -> Result<TextBufferSnapshot> {
4614 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4615
4616 if let Some(version) = version {
4617 let buffer_id = buffer.read(cx).remote_id();
4618 let snapshots = self
4619 .buffer_snapshots
4620 .get_mut(&buffer_id)
4621 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4622 let mut found_snapshot = None;
4623 snapshots.retain(|(snapshot_version, snapshot)| {
4624 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4625 false
4626 } else {
4627 if *snapshot_version == version {
4628 found_snapshot = Some(snapshot.clone());
4629 }
4630 true
4631 }
4632 });
4633
4634 found_snapshot.ok_or_else(|| {
4635 anyhow!(
4636 "snapshot not found for buffer {} at version {}",
4637 buffer_id,
4638 version
4639 )
4640 })
4641 } else {
4642             Ok(buffer.read(cx).text_snapshot())
4643 }
4644 }
4645
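    // Looks up the adapter and language server responsible for `buffer`, keyed by the
    // buffer's worktree and its language's LSP adapter name.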
4646 fn language_server_for_buffer(
4647 &self,
4648 buffer: &Buffer,
4649 cx: &AppContext,
4650 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4651 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4652 let worktree_id = file.worktree_id(cx);
4653 self.language_servers
4654 .get(&(worktree_id, language.lsp_adapter()?.name()))
4655 } else {
4656 None
4657 }
4658 }
4659}
4660
4661impl WorktreeHandle {
4662 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4663 match self {
4664 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4665 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4666 }
4667 }
4668}
4669
4670impl OpenBuffer {
4671 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4672 match self {
4673 OpenBuffer::Strong(handle) => Some(handle.clone()),
4674 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4675 OpenBuffer::Loading(_) => None,
4676 }
4677 }
4678}
4679
4680struct CandidateSet {
4681 snapshot: Snapshot,
4682 include_ignored: bool,
4683 include_root_name: bool,
4684}
4685
4686impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4687 type Candidates = CandidateSetIter<'a>;
4688
4689 fn id(&self) -> usize {
4690 self.snapshot.id().to_usize()
4691 }
4692
4693 fn len(&self) -> usize {
4694 if self.include_ignored {
4695 self.snapshot.file_count()
4696 } else {
4697 self.snapshot.visible_file_count()
4698 }
4699 }
4700
4701 fn prefix(&self) -> Arc<str> {
4702 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4703 self.snapshot.root_name().into()
4704 } else if self.include_root_name {
4705 format!("{}/", self.snapshot.root_name()).into()
4706 } else {
4707 "".into()
4708 }
4709 }
4710
4711 fn candidates(&'a self, start: usize) -> Self::Candidates {
4712 CandidateSetIter {
4713 traversal: self.snapshot.files(self.include_ignored, start),
4714 }
4715 }
4716}
4717
4718struct CandidateSetIter<'a> {
4719 traversal: Traversal<'a>,
4720}
4721
4722impl<'a> Iterator for CandidateSetIter<'a> {
4723 type Item = PathMatchCandidate<'a>;
4724
4725 fn next(&mut self) -> Option<Self::Item> {
4726 self.traversal.next().map(|entry| {
4727 if let EntryKind::File(char_bag) = entry.kind {
4728 PathMatchCandidate {
4729 path: &entry.path,
4730 char_bag,
4731 }
4732 } else {
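                // The traversal produced by `Snapshot::files` only yields file entries.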
4733 unreachable!()
4734 }
4735 })
4736 }
4737}
4738
4739impl Entity for Project {
4740 type Event = Event;
4741
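    // On release, notify the server: a registered local project is unregistered and a
    // remote project is left.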
4742 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4743 match &self.client_state {
4744 ProjectClientState::Local { remote_id_rx, .. } => {
4745 if let Some(project_id) = *remote_id_rx.borrow() {
4746 self.client
4747 .send(proto::UnregisterProject { project_id })
4748 .log_err();
4749 }
4750 }
4751 ProjectClientState::Remote { remote_id, .. } => {
4752 self.client
4753 .send(proto::LeaveProject {
4754 project_id: *remote_id,
4755 })
4756 .log_err();
4757 }
4758 }
4759 }
4760
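    // Before the app quits, ask all running language servers to shut down and wait for
    // them to finish.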
4761 fn app_will_quit(
4762 &mut self,
4763 _: &mut MutableAppContext,
4764 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4765 let shutdown_futures = self
4766 .language_servers
4767 .drain()
4768 .filter_map(|(_, (_, server))| server.shutdown())
4769 .collect::<Vec<_>>();
4770 Some(
4771 async move {
4772 futures::future::join_all(shutdown_futures).await;
4773 }
4774 .boxed(),
4775 )
4776 }
4777}
4778
4779impl Collaborator {
4780 fn from_proto(
4781 message: proto::Collaborator,
4782 user_store: &ModelHandle<UserStore>,
4783 cx: &mut AsyncAppContext,
4784 ) -> impl Future<Output = Result<Self>> {
4785 let user = user_store.update(cx, |user_store, cx| {
4786 user_store.fetch_user(message.user_id, cx)
4787 });
4788
4789 async move {
4790 Ok(Self {
4791 peer_id: PeerId(message.peer_id),
4792 user: user.await?,
4793 replica_id: message.replica_id as ReplicaId,
4794 })
4795 }
4796 }
4797}
4798
4799impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4800 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4801 Self {
4802 worktree_id,
4803 path: path.as_ref().into(),
4804 }
4805 }
4806}
4807
4808impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4809 fn from(options: lsp::CreateFileOptions) -> Self {
4810 Self {
4811 overwrite: options.overwrite.unwrap_or(false),
4812 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4813 }
4814 }
4815}
4816
4817impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4818 fn from(options: lsp::RenameFileOptions) -> Self {
4819 Self {
4820 overwrite: options.overwrite.unwrap_or(false),
4821 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4822 }
4823 }
4824}
4825
4826impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4827 fn from(options: lsp::DeleteFileOptions) -> Self {
4828 Self {
4829 recursive: options.recursive.unwrap_or(false),
4830 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4831 }
4832 }
4833}
4834
4835fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4836 proto::Symbol {
4837 source_worktree_id: symbol.source_worktree_id.to_proto(),
4838 worktree_id: symbol.worktree_id.to_proto(),
4839 language_server_name: symbol.language_server_name.0.to_string(),
4840 name: symbol.name.clone(),
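        // The `lsp::SymbolKind` is sent as its raw integer representation;
        // `deserialize_symbol` transmutes it back.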
4841 kind: unsafe { mem::transmute(symbol.kind) },
4842 path: symbol.path.to_string_lossy().to_string(),
4843 start: Some(proto::Point {
4844 row: symbol.range.start.row,
4845 column: symbol.range.start.column,
4846 }),
4847 end: Some(proto::Point {
4848 row: symbol.range.end.row,
4849 column: symbol.range.end.column,
4850 }),
4851 signature: symbol.signature.to_vec(),
4852 }
4853}
4854
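// Computes `path` relative to `base`, inserting `..` components wherever the two diverge.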
4855fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4856 let mut path_components = path.components();
4857 let mut base_components = base.components();
4858 let mut components: Vec<Component> = Vec::new();
4859 loop {
4860 match (path_components.next(), base_components.next()) {
4861 (None, None) => break,
4862 (Some(a), None) => {
4863 components.push(a);
4864 components.extend(path_components.by_ref());
4865 break;
4866 }
4867 (None, _) => components.push(Component::ParentDir),
4868 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4869 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4870 (Some(a), Some(_)) => {
4871 components.push(Component::ParentDir);
4872 for _ in base_components {
4873 components.push(Component::ParentDir);
4874 }
4875 components.push(a);
4876 components.extend(path_components.by_ref());
4877 break;
4878 }
4879 }
4880 }
4881 components.iter().map(|c| c.as_os_str()).collect()
4882}
4883
4884impl Item for Buffer {
4885 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4886 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4887 }
4888}
4889
4890#[cfg(test)]
4891mod tests {
4892 use super::{Event, *};
4893 use fs::RealFs;
4894 use futures::{future, StreamExt};
4895 use gpui::test::subscribe;
4896 use language::{
4897 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4898 ToPoint,
4899 };
4900 use lsp::Url;
4901 use serde_json::json;
4902 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4903 use unindent::Unindent as _;
4904 use util::{assert_set_eq, test::temp_tree};
4905 use worktree::WorktreeHandle as _;
4906
4907 #[gpui::test]
4908 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4909 let dir = temp_tree(json!({
4910 "root": {
4911 "apple": "",
4912 "banana": {
4913 "carrot": {
4914 "date": "",
4915 "endive": "",
4916 }
4917 },
4918 "fennel": {
4919 "grape": "",
4920 }
4921 }
4922 }));
4923
4924 let root_link_path = dir.path().join("root_link");
4925 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4926 unix::fs::symlink(
4927 &dir.path().join("root/fennel"),
4928 &dir.path().join("root/finnochio"),
4929 )
4930 .unwrap();
4931
4932 let project = Project::test(Arc::new(RealFs), cx);
4933
4934 let (tree, _) = project
4935 .update(cx, |project, cx| {
4936 project.find_or_create_local_worktree(&root_link_path, true, cx)
4937 })
4938 .await
4939 .unwrap();
4940
4941 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4942 .await;
4943 cx.read(|cx| {
4944 let tree = tree.read(cx);
4945 assert_eq!(tree.file_count(), 5);
4946 assert_eq!(
4947 tree.inode_for_path("fennel/grape"),
4948 tree.inode_for_path("finnochio/grape")
4949 );
4950 });
4951
4952 let cancel_flag = Default::default();
4953 let results = project
4954 .read_with(cx, |project, cx| {
4955 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4956 })
4957 .await;
4958 assert_eq!(
4959 results
4960 .into_iter()
4961 .map(|result| result.path)
4962 .collect::<Vec<Arc<Path>>>(),
4963 vec![
4964 PathBuf::from("banana/carrot/date").into(),
4965 PathBuf::from("banana/carrot/endive").into(),
4966 ]
4967 );
4968 }
4969
4970 #[gpui::test]
4971 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4972 cx.foreground().forbid_parking();
4973
4974 let mut rust_language = Language::new(
4975 LanguageConfig {
4976 name: "Rust".into(),
4977 path_suffixes: vec!["rs".to_string()],
4978 ..Default::default()
4979 },
4980 Some(tree_sitter_rust::language()),
4981 );
4982 let mut json_language = Language::new(
4983 LanguageConfig {
4984 name: "JSON".into(),
4985 path_suffixes: vec!["json".to_string()],
4986 ..Default::default()
4987 },
4988 None,
4989 );
4990 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4991 name: "the-rust-language-server",
4992 capabilities: lsp::ServerCapabilities {
4993 completion_provider: Some(lsp::CompletionOptions {
4994 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4995 ..Default::default()
4996 }),
4997 ..Default::default()
4998 },
4999 ..Default::default()
5000 });
5001 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5002 name: "the-json-language-server",
5003 capabilities: lsp::ServerCapabilities {
5004 completion_provider: Some(lsp::CompletionOptions {
5005 trigger_characters: Some(vec![":".to_string()]),
5006 ..Default::default()
5007 }),
5008 ..Default::default()
5009 },
5010 ..Default::default()
5011 });
5012
5013 let fs = FakeFs::new(cx.background());
5014 fs.insert_tree(
5015 "/the-root",
5016 json!({
5017 "test.rs": "const A: i32 = 1;",
5018 "test2.rs": "",
5019 "Cargo.toml": "a = 1",
5020 "package.json": "{\"a\": 1}",
5021 }),
5022 )
5023 .await;
5024
5025 let project = Project::test(fs.clone(), cx);
5026 project.update(cx, |project, _| {
5027 project.languages.add(Arc::new(rust_language));
5028 project.languages.add(Arc::new(json_language));
5029 });
5030
5031 let worktree_id = project
5032 .update(cx, |project, cx| {
5033 project.find_or_create_local_worktree("/the-root", true, cx)
5034 })
5035 .await
5036 .unwrap()
5037 .0
5038 .read_with(cx, |tree, _| tree.id());
5039
5040 // Open a buffer without an associated language server.
5041 let toml_buffer = project
5042 .update(cx, |project, cx| {
5043 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5044 })
5045 .await
5046 .unwrap();
5047
5048 // Open a buffer with an associated language server.
5049 let rust_buffer = project
5050 .update(cx, |project, cx| {
5051 project.open_buffer((worktree_id, "test.rs"), cx)
5052 })
5053 .await
5054 .unwrap();
5055
5056 // A server is started up, and it is notified about Rust files.
5057 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5058 assert_eq!(
5059 fake_rust_server
5060 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5061 .await
5062 .text_document,
5063 lsp::TextDocumentItem {
5064 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5065 version: 0,
5066 text: "const A: i32 = 1;".to_string(),
5067 language_id: Default::default()
5068 }
5069 );
5070
5071 // The buffer is configured based on the language server's capabilities.
5072 rust_buffer.read_with(cx, |buffer, _| {
5073 assert_eq!(
5074 buffer.completion_triggers(),
5075 &[".".to_string(), "::".to_string()]
5076 );
5077 });
5078 toml_buffer.read_with(cx, |buffer, _| {
5079 assert!(buffer.completion_triggers().is_empty());
5080 });
5081
5082 // Edit a buffer. The changes are reported to the language server.
5083 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5084 assert_eq!(
5085 fake_rust_server
5086 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5087 .await
5088 .text_document,
5089 lsp::VersionedTextDocumentIdentifier::new(
5090 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5091 1
5092 )
5093 );
5094
5095 // Open a third buffer with a different associated language server.
5096 let json_buffer = project
5097 .update(cx, |project, cx| {
5098 project.open_buffer((worktree_id, "package.json"), cx)
5099 })
5100 .await
5101 .unwrap();
5102
5103 // A json language server is started up and is only notified about the json buffer.
5104 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5105 assert_eq!(
5106 fake_json_server
5107 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5108 .await
5109 .text_document,
5110 lsp::TextDocumentItem {
5111 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5112 version: 0,
5113 text: "{\"a\": 1}".to_string(),
5114 language_id: Default::default()
5115 }
5116 );
5117
5118 // This buffer is configured based on the second language server's
5119 // capabilities.
5120 json_buffer.read_with(cx, |buffer, _| {
5121 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5122 });
5123
5124 // When opening another buffer whose language server is already running,
5125 // it is also configured based on the existing language server's capabilities.
5126 let rust_buffer2 = project
5127 .update(cx, |project, cx| {
5128 project.open_buffer((worktree_id, "test2.rs"), cx)
5129 })
5130 .await
5131 .unwrap();
5132 rust_buffer2.read_with(cx, |buffer, _| {
5133 assert_eq!(
5134 buffer.completion_triggers(),
5135 &[".".to_string(), "::".to_string()]
5136 );
5137 });
5138
5139 // Changes are reported only to servers matching the buffer's language.
5140 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5141 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5142 assert_eq!(
5143 fake_rust_server
5144 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5145 .await
5146 .text_document,
5147 lsp::VersionedTextDocumentIdentifier::new(
5148 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5149 1
5150 )
5151 );
5152
5153 // Save notifications are reported to all servers.
5154 toml_buffer
5155 .update(cx, |buffer, cx| buffer.save(cx))
5156 .await
5157 .unwrap();
5158 assert_eq!(
5159 fake_rust_server
5160 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5161 .await
5162 .text_document,
5163 lsp::TextDocumentIdentifier::new(
5164 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5165 )
5166 );
5167 assert_eq!(
5168 fake_json_server
5169 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5170 .await
5171 .text_document,
5172 lsp::TextDocumentIdentifier::new(
5173 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5174 )
5175 );
5176
5177 // Renames are reported only to servers matching the buffer's language.
5178 fs.rename(
5179 Path::new("/the-root/test2.rs"),
5180 Path::new("/the-root/test3.rs"),
5181 Default::default(),
5182 )
5183 .await
5184 .unwrap();
5185 assert_eq!(
5186 fake_rust_server
5187 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5188 .await
5189 .text_document,
5190 lsp::TextDocumentIdentifier::new(
5191 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5192 ),
5193 );
5194 assert_eq!(
5195 fake_rust_server
5196 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5197 .await
5198 .text_document,
5199 lsp::TextDocumentItem {
5200 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5201 version: 0,
5202 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5203 language_id: Default::default()
5204 },
5205 );
5206
5207 rust_buffer2.update(cx, |buffer, cx| {
5208 buffer.update_diagnostics(
5209 DiagnosticSet::from_sorted_entries(
5210 vec![DiagnosticEntry {
5211 diagnostic: Default::default(),
5212 range: Anchor::MIN..Anchor::MAX,
5213 }],
5214 &buffer.snapshot(),
5215 ),
5216 cx,
5217 );
5218 assert_eq!(
5219 buffer
5220 .snapshot()
5221 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5222 .count(),
5223 1
5224 );
5225 });
5226
5227 // When the rename changes the extension of the file, the buffer gets closed on the old
5228 // language server and gets opened on the new one.
5229 fs.rename(
5230 Path::new("/the-root/test3.rs"),
5231 Path::new("/the-root/test3.json"),
5232 Default::default(),
5233 )
5234 .await
5235 .unwrap();
5236 assert_eq!(
5237 fake_rust_server
5238 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5239 .await
5240 .text_document,
5241 lsp::TextDocumentIdentifier::new(
5242 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5243 ),
5244 );
5245 assert_eq!(
5246 fake_json_server
5247 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5248 .await
5249 .text_document,
5250 lsp::TextDocumentItem {
5251 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5252 version: 0,
5253 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5254 language_id: Default::default()
5255 },
5256 );
5257 // We clear the diagnostics, since the language has changed.
5258 rust_buffer2.read_with(cx, |buffer, _| {
5259 assert_eq!(
5260 buffer
5261 .snapshot()
5262 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5263 .count(),
5264 0
5265 );
5266 });
5267
5268         // The renamed file's version resets after the language server changes.
5269 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5270 assert_eq!(
5271 fake_json_server
5272 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5273 .await
5274 .text_document,
5275 lsp::VersionedTextDocumentIdentifier::new(
5276 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5277 1
5278 )
5279 );
5280
5281 // Restart language servers
5282 project.update(cx, |project, cx| {
5283 project.restart_language_servers_for_buffers(
5284 vec![rust_buffer.clone(), json_buffer.clone()],
5285 cx,
5286 );
5287 });
5288
5289 let mut rust_shutdown_requests = fake_rust_server
5290 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5291 let mut json_shutdown_requests = fake_json_server
5292 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5293 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5294
5295 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5296 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5297
5298 // Ensure rust document is reopened in new rust language server
5299 assert_eq!(
5300 fake_rust_server
5301 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5302 .await
5303 .text_document,
5304 lsp::TextDocumentItem {
5305 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5306 version: 1,
5307 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5308 language_id: Default::default()
5309 }
5310 );
5311
5312 // Ensure json documents are reopened in new json language server
5313 assert_set_eq!(
5314 [
5315 fake_json_server
5316 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5317 .await
5318 .text_document,
5319 fake_json_server
5320 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5321 .await
5322 .text_document,
5323 ],
5324 [
5325 lsp::TextDocumentItem {
5326 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5327 version: 0,
5328 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5329 language_id: Default::default()
5330 },
5331 lsp::TextDocumentItem {
5332 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5333 version: 1,
5334 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5335 language_id: Default::default()
5336 }
5337 ]
5338 );
5339
5340 // Close notifications are reported only to servers matching the buffer's language.
5341 cx.update(|_| drop(json_buffer));
5342 let close_message = lsp::DidCloseTextDocumentParams {
5343 text_document: lsp::TextDocumentIdentifier::new(
5344 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5345 ),
5346 };
5347 assert_eq!(
5348 fake_json_server
5349 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5350 .await,
5351 close_message,
5352 );
5353 }
5354
5355 #[gpui::test]
5356 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5357 cx.foreground().forbid_parking();
5358
5359 let progress_token = "the-progress-token";
5360 let mut language = Language::new(
5361 LanguageConfig {
5362 name: "Rust".into(),
5363 path_suffixes: vec!["rs".to_string()],
5364 ..Default::default()
5365 },
5366 Some(tree_sitter_rust::language()),
5367 );
5368 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5369 disk_based_diagnostics_progress_token: Some(progress_token),
5370 disk_based_diagnostics_sources: &["disk"],
5371 ..Default::default()
5372 });
5373
5374 let fs = FakeFs::new(cx.background());
5375 fs.insert_tree(
5376 "/dir",
5377 json!({
5378 "a.rs": "fn a() { A }",
5379 "b.rs": "const y: i32 = 1",
5380 }),
5381 )
5382 .await;
5383
5384 let project = Project::test(fs, cx);
5385 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5386
5387 let (tree, _) = project
5388 .update(cx, |project, cx| {
5389 project.find_or_create_local_worktree("/dir", true, cx)
5390 })
5391 .await
5392 .unwrap();
5393 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5394
5395 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5396 .await;
5397
5398         // Cause the worktree to start the fake language server
5399 let _buffer = project
5400 .update(cx, |project, cx| {
5401 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5402 })
5403 .await
5404 .unwrap();
5405
5406 let mut events = subscribe(&project, cx);
5407
5408 let mut fake_server = fake_servers.next().await.unwrap();
5409 fake_server.start_progress(progress_token).await;
5410 assert_eq!(
5411 events.next().await.unwrap(),
5412 Event::DiskBasedDiagnosticsStarted
5413 );
5414
5415 fake_server.start_progress(progress_token).await;
5416 fake_server.end_progress(progress_token).await;
5417 fake_server.start_progress(progress_token).await;
5418
5419 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5420 lsp::PublishDiagnosticsParams {
5421 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5422 version: None,
5423 diagnostics: vec![lsp::Diagnostic {
5424 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5425 severity: Some(lsp::DiagnosticSeverity::ERROR),
5426 message: "undefined variable 'A'".to_string(),
5427 ..Default::default()
5428 }],
5429 },
5430 );
5431 assert_eq!(
5432 events.next().await.unwrap(),
5433 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5434 );
5435
5436 fake_server.end_progress(progress_token).await;
5437 fake_server.end_progress(progress_token).await;
5438 assert_eq!(
5439 events.next().await.unwrap(),
5440 Event::DiskBasedDiagnosticsUpdated
5441 );
5442 assert_eq!(
5443 events.next().await.unwrap(),
5444 Event::DiskBasedDiagnosticsFinished
5445 );
5446
5447 let buffer = project
5448 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5449 .await
5450 .unwrap();
5451
5452 buffer.read_with(cx, |buffer, _| {
5453 let snapshot = buffer.snapshot();
5454 let diagnostics = snapshot
5455 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5456 .collect::<Vec<_>>();
5457 assert_eq!(
5458 diagnostics,
5459 &[DiagnosticEntry {
5460 range: Point::new(0, 9)..Point::new(0, 10),
5461 diagnostic: Diagnostic {
5462 severity: lsp::DiagnosticSeverity::ERROR,
5463 message: "undefined variable 'A'".to_string(),
5464 group_id: 0,
5465 is_primary: true,
5466 ..Default::default()
5467 }
5468 }]
5469 )
5470 });
5471 }
5472
5473 #[gpui::test]
5474 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5475 cx.foreground().forbid_parking();
5476
5477 let progress_token = "the-progress-token";
5478 let mut language = Language::new(
5479 LanguageConfig {
5480 path_suffixes: vec!["rs".to_string()],
5481 ..Default::default()
5482 },
5483 None,
5484 );
5485 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5486 disk_based_diagnostics_sources: &["disk"],
5487 disk_based_diagnostics_progress_token: Some(progress_token),
5488 ..Default::default()
5489 });
5490
5491 let fs = FakeFs::new(cx.background());
5492 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5493
5494 let project = Project::test(fs, cx);
5495 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5496
5497 let worktree_id = project
5498 .update(cx, |project, cx| {
5499 project.find_or_create_local_worktree("/dir", true, cx)
5500 })
5501 .await
5502 .unwrap()
5503 .0
5504 .read_with(cx, |tree, _| tree.id());
5505
5506 let buffer = project
5507 .update(cx, |project, cx| {
5508 project.open_buffer((worktree_id, "a.rs"), cx)
5509 })
5510 .await
5511 .unwrap();
5512
5513 // Simulate diagnostics starting to update.
5514 let mut fake_server = fake_servers.next().await.unwrap();
5515 fake_server.start_progress(progress_token).await;
5516
5517 // Restart the server before the diagnostics finish updating.
5518 project.update(cx, |project, cx| {
5519 project.restart_language_servers_for_buffers([buffer], cx);
5520 });
5521 let mut events = subscribe(&project, cx);
5522
5523 // Simulate the newly started server sending more diagnostics.
5524 let mut fake_server = fake_servers.next().await.unwrap();
5525 fake_server.start_progress(progress_token).await;
5526 assert_eq!(
5527 events.next().await.unwrap(),
5528 Event::DiskBasedDiagnosticsStarted
5529 );
5530
5531 // All diagnostics are considered done, despite the old server's diagnostic
5532 // task never completing.
5533 fake_server.end_progress(progress_token).await;
5534 assert_eq!(
5535 events.next().await.unwrap(),
5536 Event::DiskBasedDiagnosticsUpdated
5537 );
5538 assert_eq!(
5539 events.next().await.unwrap(),
5540 Event::DiskBasedDiagnosticsFinished
5541 );
5542 project.read_with(cx, |project, _| {
5543 assert!(!project.is_running_disk_based_diagnostics());
5544 });
5545 }
5546
5547 #[gpui::test]
5548 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5549 cx.foreground().forbid_parking();
5550
5551 let mut language = Language::new(
5552 LanguageConfig {
5553 name: "Rust".into(),
5554 path_suffixes: vec!["rs".to_string()],
5555 ..Default::default()
5556 },
5557 Some(tree_sitter_rust::language()),
5558 );
5559 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5560 disk_based_diagnostics_sources: &["disk"],
5561 ..Default::default()
5562 });
5563
5564 let text = "
5565 fn a() { A }
5566 fn b() { BB }
5567 fn c() { CCC }
5568 "
5569 .unindent();
5570
5571 let fs = FakeFs::new(cx.background());
5572 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5573
5574 let project = Project::test(fs, cx);
5575 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5576
5577 let worktree_id = project
5578 .update(cx, |project, cx| {
5579 project.find_or_create_local_worktree("/dir", true, cx)
5580 })
5581 .await
5582 .unwrap()
5583 .0
5584 .read_with(cx, |tree, _| tree.id());
5585
5586 let buffer = project
5587 .update(cx, |project, cx| {
5588 project.open_buffer((worktree_id, "a.rs"), cx)
5589 })
5590 .await
5591 .unwrap();
5592
5593 let mut fake_server = fake_servers.next().await.unwrap();
5594 let open_notification = fake_server
5595 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5596 .await;
5597
5598 // Edit the buffer, moving the content down
5599 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5600 let change_notification_1 = fake_server
5601 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5602 .await;
5603 assert!(
5604 change_notification_1.text_document.version > open_notification.text_document.version
5605 );
5606
5607 // Report some diagnostics for the initial version of the buffer
5608 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5609 lsp::PublishDiagnosticsParams {
5610 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5611 version: Some(open_notification.text_document.version),
5612 diagnostics: vec![
5613 lsp::Diagnostic {
5614 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5615 severity: Some(DiagnosticSeverity::ERROR),
5616 message: "undefined variable 'A'".to_string(),
5617 source: Some("disk".to_string()),
5618 ..Default::default()
5619 },
5620 lsp::Diagnostic {
5621 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5622 severity: Some(DiagnosticSeverity::ERROR),
5623 message: "undefined variable 'BB'".to_string(),
5624 source: Some("disk".to_string()),
5625 ..Default::default()
5626 },
5627 lsp::Diagnostic {
5628 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5629 severity: Some(DiagnosticSeverity::ERROR),
5630 source: Some("disk".to_string()),
5631 message: "undefined variable 'CCC'".to_string(),
5632 ..Default::default()
5633 },
5634 ],
5635 },
5636 );
5637
5638 // The diagnostics have moved down since they were created.
5639 buffer.next_notification(cx).await;
5640 buffer.read_with(cx, |buffer, _| {
5641 assert_eq!(
5642 buffer
5643 .snapshot()
5644 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5645 .collect::<Vec<_>>(),
5646 &[
5647 DiagnosticEntry {
5648 range: Point::new(3, 9)..Point::new(3, 11),
5649 diagnostic: Diagnostic {
5650 severity: DiagnosticSeverity::ERROR,
5651 message: "undefined variable 'BB'".to_string(),
5652 is_disk_based: true,
5653 group_id: 1,
5654 is_primary: true,
5655 ..Default::default()
5656 },
5657 },
5658 DiagnosticEntry {
5659 range: Point::new(4, 9)..Point::new(4, 12),
5660 diagnostic: Diagnostic {
5661 severity: DiagnosticSeverity::ERROR,
5662 message: "undefined variable 'CCC'".to_string(),
5663 is_disk_based: true,
5664 group_id: 2,
5665 is_primary: true,
5666 ..Default::default()
5667 }
5668 }
5669 ]
5670 );
5671 assert_eq!(
5672 chunks_with_diagnostics(buffer, 0..buffer.len()),
5673 [
5674 ("\n\nfn a() { ".to_string(), None),
5675 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5676 (" }\nfn b() { ".to_string(), None),
5677 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5678 (" }\nfn c() { ".to_string(), None),
5679 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5680 (" }\n".to_string(), None),
5681 ]
5682 );
5683 assert_eq!(
5684 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5685 [
5686 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5687 (" }\nfn c() { ".to_string(), None),
5688 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5689 ]
5690 );
5691 });
5692
5693 // Ensure overlapping diagnostics are highlighted correctly.
5694 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5695 lsp::PublishDiagnosticsParams {
5696 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5697 version: Some(open_notification.text_document.version),
5698 diagnostics: vec![
5699 lsp::Diagnostic {
5700 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5701 severity: Some(DiagnosticSeverity::ERROR),
5702 message: "undefined variable 'A'".to_string(),
5703 source: Some("disk".to_string()),
5704 ..Default::default()
5705 },
5706 lsp::Diagnostic {
5707 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5708 severity: Some(DiagnosticSeverity::WARNING),
5709 message: "unreachable statement".to_string(),
5710 source: Some("disk".to_string()),
5711 ..Default::default()
5712 },
5713 ],
5714 },
5715 );
5716
5717 buffer.next_notification(cx).await;
5718 buffer.read_with(cx, |buffer, _| {
5719 assert_eq!(
5720 buffer
5721 .snapshot()
5722 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5723 .collect::<Vec<_>>(),
5724 &[
5725 DiagnosticEntry {
5726 range: Point::new(2, 9)..Point::new(2, 12),
5727 diagnostic: Diagnostic {
5728 severity: DiagnosticSeverity::WARNING,
5729 message: "unreachable statement".to_string(),
5730 is_disk_based: true,
5731 group_id: 1,
5732 is_primary: true,
5733 ..Default::default()
5734 }
5735 },
5736 DiagnosticEntry {
5737 range: Point::new(2, 9)..Point::new(2, 10),
5738 diagnostic: Diagnostic {
5739 severity: DiagnosticSeverity::ERROR,
5740 message: "undefined variable 'A'".to_string(),
5741 is_disk_based: true,
5742 group_id: 0,
5743 is_primary: true,
5744 ..Default::default()
5745 },
5746 }
5747 ]
5748 );
5749 assert_eq!(
5750 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5751 [
5752 ("fn a() { ".to_string(), None),
5753 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5754 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5755 ("\n".to_string(), None),
5756 ]
5757 );
5758 assert_eq!(
5759 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5760 [
5761 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5762 ("\n".to_string(), None),
5763 ]
5764 );
5765 });
5766
5767 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5768 // changes since the last save.
5769 buffer.update(cx, |buffer, cx| {
5770 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5771 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5772 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5773 });
5774 let change_notification_2 = fake_server
5775 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5776 .await;
5777 assert!(
5778 change_notification_2.text_document.version
5779 > change_notification_1.text_document.version
5780 );
5781
5782 // Handle out-of-order diagnostics
5783 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5784 lsp::PublishDiagnosticsParams {
5785 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5786 version: Some(change_notification_2.text_document.version),
5787 diagnostics: vec![
5788 lsp::Diagnostic {
5789 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5790 severity: Some(DiagnosticSeverity::ERROR),
5791 message: "undefined variable 'BB'".to_string(),
5792 source: Some("disk".to_string()),
5793 ..Default::default()
5794 },
5795 lsp::Diagnostic {
5796 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5797 severity: Some(DiagnosticSeverity::WARNING),
5798 message: "undefined variable 'A'".to_string(),
5799 source: Some("disk".to_string()),
5800 ..Default::default()
5801 },
5802 ],
5803 },
5804 );
5805
5806 buffer.next_notification(cx).await;
5807 buffer.read_with(cx, |buffer, _| {
5808 assert_eq!(
5809 buffer
5810 .snapshot()
5811 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5812 .collect::<Vec<_>>(),
5813 &[
5814 DiagnosticEntry {
5815 range: Point::new(2, 21)..Point::new(2, 22),
5816 diagnostic: Diagnostic {
5817 severity: DiagnosticSeverity::WARNING,
5818 message: "undefined variable 'A'".to_string(),
5819 is_disk_based: true,
5820 group_id: 1,
5821 is_primary: true,
5822 ..Default::default()
5823 }
5824 },
5825 DiagnosticEntry {
5826 range: Point::new(3, 9)..Point::new(3, 14),
5827 diagnostic: Diagnostic {
5828 severity: DiagnosticSeverity::ERROR,
5829 message: "undefined variable 'BB'".to_string(),
5830 is_disk_based: true,
5831 group_id: 0,
5832 is_primary: true,
5833 ..Default::default()
5834 },
5835 }
5836 ]
5837 );
5838 });
5839 }
5840
5841 #[gpui::test]
5842 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5843 cx.foreground().forbid_parking();
5844
5845 let text = concat!(
5846 "let one = ;\n", //
5847 "let two = \n",
5848 "let three = 3;\n",
5849 );
5850
5851 let fs = FakeFs::new(cx.background());
5852 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5853
5854 let project = Project::test(fs, cx);
5855 let worktree_id = project
5856 .update(cx, |project, cx| {
5857 project.find_or_create_local_worktree("/dir", true, cx)
5858 })
5859 .await
5860 .unwrap()
5861 .0
5862 .read_with(cx, |tree, _| tree.id());
5863
5864 let buffer = project
5865 .update(cx, |project, cx| {
5866 project.open_buffer((worktree_id, "a.rs"), cx)
5867 })
5868 .await
5869 .unwrap();
5870
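        // Assign two zero-width diagnostics directly to the buffer via the project:
        // one at the `;` on the first line and one at the very end of the second line.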
5871 project.update(cx, |project, cx| {
5872 project
5873 .update_buffer_diagnostics(
5874 &buffer,
5875 vec![
5876 DiagnosticEntry {
5877 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5878 diagnostic: Diagnostic {
5879 severity: DiagnosticSeverity::ERROR,
5880 message: "syntax error 1".to_string(),
5881 ..Default::default()
5882 },
5883 },
5884 DiagnosticEntry {
5885 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5886 diagnostic: Diagnostic {
5887 severity: DiagnosticSeverity::ERROR,
5888 message: "syntax error 2".to_string(),
5889 ..Default::default()
5890 },
5891 },
5892 ],
5893 None,
5894 cx,
5895 )
5896 .unwrap();
5897 });
5898
5899 // An empty range is extended forward to include the following character.
5900 // At the end of a line, an empty range is extended backward to include
5901 // the preceding character.
5902 buffer.read_with(cx, |buffer, _| {
5903 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5904 assert_eq!(
5905 chunks
5906 .iter()
5907 .map(|(s, d)| (s.as_str(), *d))
5908 .collect::<Vec<_>>(),
5909 &[
5910 ("let one = ", None),
5911 (";", Some(DiagnosticSeverity::ERROR)),
5912 ("\nlet two =", None),
5913 (" ", Some(DiagnosticSeverity::ERROR)),
5914 ("\nlet three = 3;\n", None)
5915 ]
5916 );
5917 });
5918 }
5919
5920 #[gpui::test]
5921 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5922 cx.foreground().forbid_parking();
5923
5924 let mut language = Language::new(
5925 LanguageConfig {
5926 name: "Rust".into(),
5927 path_suffixes: vec!["rs".to_string()],
5928 ..Default::default()
5929 },
5930 Some(tree_sitter_rust::language()),
5931 );
5932 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5933
5934 let text = "
5935 fn a() {
5936 f1();
5937 }
5938 fn b() {
5939 f2();
5940 }
5941 fn c() {
5942 f3();
5943 }
5944 "
5945 .unindent();
5946
5947 let fs = FakeFs::new(cx.background());
5948 fs.insert_tree(
5949 "/dir",
5950 json!({
5951 "a.rs": text.clone(),
5952 }),
5953 )
5954 .await;
5955
5956 let project = Project::test(fs, cx);
5957 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5958
5959 let worktree_id = project
5960 .update(cx, |project, cx| {
5961 project.find_or_create_local_worktree("/dir", true, cx)
5962 })
5963 .await
5964 .unwrap()
5965 .0
5966 .read_with(cx, |tree, _| tree.id());
5967
5968 let buffer = project
5969 .update(cx, |project, cx| {
5970 project.open_buffer((worktree_id, "a.rs"), cx)
5971 })
5972 .await
5973 .unwrap();
5974
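        // Record the document version the language server received when the buffer was
        // opened. The edits below will be expressed against this soon-to-be-outdated version.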
5975 let mut fake_server = fake_servers.next().await.unwrap();
5976 let lsp_document_version = fake_server
5977 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5978 .await
5979 .text_document
5980 .version;
5981
5982 // Simulate editing the buffer after the language server computes some edits.
5983 buffer.update(cx, |buffer, cx| {
5984 buffer.edit(
5985 [Point::new(0, 0)..Point::new(0, 0)],
5986 "// above first function\n",
5987 cx,
5988 );
5989 buffer.edit(
5990 [Point::new(2, 0)..Point::new(2, 0)],
5991 " // inside first function\n",
5992 cx,
5993 );
5994 buffer.edit(
5995 [Point::new(6, 4)..Point::new(6, 4)],
5996 "// inside second function ",
5997 cx,
5998 );
5999
6000 assert_eq!(
6001 buffer.text(),
6002 "
6003 // above first function
6004 fn a() {
6005 // inside first function
6006 f1();
6007 }
6008 fn b() {
6009 // inside second function f2();
6010 }
6011 fn c() {
6012 f3();
6013 }
6014 "
6015 .unindent()
6016 );
6017 });
6018
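        // Interpret LSP edits that were computed against the older document version. They
        // should be resolved against that version and then mapped through the buffer edits
        // made above.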
6019 let edits = project
6020 .update(cx, |project, cx| {
6021 project.edits_from_lsp(
6022 &buffer,
6023 vec![
6024 // replace body of first function
6025 lsp::TextEdit {
6026 range: lsp::Range::new(
6027 lsp::Position::new(0, 0),
6028 lsp::Position::new(3, 0),
6029 ),
6030 new_text: "
6031 fn a() {
6032 f10();
6033 }
6034 "
6035 .unindent(),
6036 },
6037 // edit inside second function
6038 lsp::TextEdit {
6039 range: lsp::Range::new(
6040 lsp::Position::new(4, 6),
6041 lsp::Position::new(4, 6),
6042 ),
6043 new_text: "00".into(),
6044 },
6045 // edit inside third function via two distinct edits
6046 lsp::TextEdit {
6047 range: lsp::Range::new(
6048 lsp::Position::new(7, 5),
6049 lsp::Position::new(7, 5),
6050 ),
6051 new_text: "4000".into(),
6052 },
6053 lsp::TextEdit {
6054 range: lsp::Range::new(
6055 lsp::Position::new(7, 5),
6056 lsp::Position::new(7, 6),
6057 ),
6058 new_text: "".into(),
6059 },
6060 ],
6061 Some(lsp_document_version),
6062 cx,
6063 )
6064 })
6065 .await
6066 .unwrap();
6067
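        // Apply the returned edits and verify that they land in the right places despite
        // the intervening buffer changes.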
6068 buffer.update(cx, |buffer, cx| {
6069 for (range, new_text) in edits {
6070 buffer.edit([range], new_text, cx);
6071 }
6072 assert_eq!(
6073 buffer.text(),
6074 "
6075 // above first function
6076 fn a() {
6077 // inside first function
6078 f10();
6079 }
6080 fn b() {
6081 // inside second function f200();
6082 }
6083 fn c() {
6084 f4000();
6085 }
6086 "
6087 .unindent()
6088 );
6089 });
6090 }
6091
6092 #[gpui::test]
6093 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6094 cx.foreground().forbid_parking();
6095
6096 let text = "
6097 use a::b;
6098 use a::c;
6099
6100 fn f() {
6101 b();
6102 c();
6103 }
6104 "
6105 .unindent();
6106
6107 let fs = FakeFs::new(cx.background());
6108 fs.insert_tree(
6109 "/dir",
6110 json!({
6111 "a.rs": text.clone(),
6112 }),
6113 )
6114 .await;
6115
6116 let project = Project::test(fs, cx);
6117 let worktree_id = project
6118 .update(cx, |project, cx| {
6119 project.find_or_create_local_worktree("/dir", true, cx)
6120 })
6121 .await
6122 .unwrap()
6123 .0
6124 .read_with(cx, |tree, _| tree.id());
6125
6126 let buffer = project
6127 .update(cx, |project, cx| {
6128 project.open_buffer((worktree_id, "a.rs"), cx)
6129 })
6130 .await
6131 .unwrap();
6132
6133 // Simulate the language server sending us a small edit in the form of a very large diff.
6134 // Rust-analyzer does this when performing a merge-imports code action.
6135 let edits = project
6136 .update(cx, |project, cx| {
6137 project.edits_from_lsp(
6138 &buffer,
6139 [
6140 // Replace the first use statement without editing the semicolon.
6141 lsp::TextEdit {
6142 range: lsp::Range::new(
6143 lsp::Position::new(0, 4),
6144 lsp::Position::new(0, 8),
6145 ),
6146 new_text: "a::{b, c}".into(),
6147 },
6148 // Reinsert the remainder of the file between the semicolon and the final
6149 // newline of the file.
6150 lsp::TextEdit {
6151 range: lsp::Range::new(
6152 lsp::Position::new(0, 9),
6153 lsp::Position::new(0, 9),
6154 ),
6155 new_text: "\n\n".into(),
6156 },
6157 lsp::TextEdit {
6158 range: lsp::Range::new(
6159 lsp::Position::new(0, 9),
6160 lsp::Position::new(0, 9),
6161 ),
6162 new_text: "
6163 fn f() {
6164 b();
6165 c();
6166 }"
6167 .unindent(),
6168 },
6169 // Delete everything after the first newline of the file.
6170 lsp::TextEdit {
6171 range: lsp::Range::new(
6172 lsp::Position::new(1, 0),
6173 lsp::Position::new(7, 0),
6174 ),
6175 new_text: "".into(),
6176 },
6177 ],
6178 None,
6179 cx,
6180 )
6181 })
6182 .await
6183 .unwrap();
6184
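        // The four overlapping LSP edits should be minimized into just two buffer edits.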
6185 buffer.update(cx, |buffer, cx| {
6186 let edits = edits
6187 .into_iter()
6188 .map(|(range, text)| {
6189 (
6190 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6191 text,
6192 )
6193 })
6194 .collect::<Vec<_>>();
6195
6196 assert_eq!(
6197 edits,
6198 [
6199 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6200 (Point::new(1, 0)..Point::new(2, 0), "".into())
6201 ]
6202 );
6203
6204 for (range, new_text) in edits {
6205 buffer.edit([range], new_text, cx);
6206 }
6207 assert_eq!(
6208 buffer.text(),
6209 "
6210 use a::{b, c};
6211
6212 fn f() {
6213 b();
6214 c();
6215 }
6216 "
6217 .unindent()
6218 );
6219 });
6220 }
6221
6222 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6223 buffer: &Buffer,
6224 range: Range<T>,
6225 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6226 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6227 for chunk in buffer.snapshot().chunks(range, true) {
6228 if chunks.last().map_or(false, |prev_chunk| {
6229 prev_chunk.1 == chunk.diagnostic_severity
6230 }) {
6231 chunks.last_mut().unwrap().0.push_str(chunk.text);
6232 } else {
6233 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6234 }
6235 }
6236 chunks
6237 }
6238
6239 #[gpui::test]
6240 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6241 let dir = temp_tree(json!({
6242 "root": {
6243 "dir1": {},
6244 "dir2": {
6245 "dir3": {}
6246 }
6247 }
6248 }));
6249
6250 let project = Project::test(Arc::new(RealFs), cx);
6251 let (tree, _) = project
6252 .update(cx, |project, cx| {
6253 project.find_or_create_local_worktree(&dir.path(), true, cx)
6254 })
6255 .await
6256 .unwrap();
6257
6258 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6259 .await;
6260
6261 let cancel_flag = Default::default();
6262 let results = project
6263 .read_with(cx, |project, cx| {
6264 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6265 })
6266 .await;
6267
6268 assert!(results.is_empty());
6269 }
6270
6271 #[gpui::test]
6272 async fn test_definition(cx: &mut gpui::TestAppContext) {
6273 let mut language = Language::new(
6274 LanguageConfig {
6275 name: "Rust".into(),
6276 path_suffixes: vec!["rs".to_string()],
6277 ..Default::default()
6278 },
6279 Some(tree_sitter_rust::language()),
6280 );
6281 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6282
6283 let fs = FakeFs::new(cx.background());
6284 fs.insert_tree(
6285 "/dir",
6286 json!({
6287 "a.rs": "const fn a() { A }",
6288 "b.rs": "const y: i32 = crate::a()",
6289 }),
6290 )
6291 .await;
6292
6293 let project = Project::test(fs, cx);
6294 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6295
6296 let (tree, _) = project
6297 .update(cx, |project, cx| {
6298 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6299 })
6300 .await
6301 .unwrap();
6302 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6303 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6304 .await;
6305
6306 let buffer = project
6307 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6308 .await
6309 .unwrap();
6310
6311 let fake_server = fake_servers.next().await.unwrap();
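        // Respond to the definition request with a location in `/dir/a.rs`, a file that
        // lies outside of the opened single-file worktree.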
6312 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6313 let params = params.text_document_position_params;
6314 assert_eq!(
6315 params.text_document.uri.to_file_path().unwrap(),
6316 Path::new("/dir/b.rs"),
6317 );
6318 assert_eq!(params.position, lsp::Position::new(0, 22));
6319
6320 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6321 lsp::Location::new(
6322 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6323 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6324 ),
6325 )))
6326 });
6327
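        // Request the definition at offset 22, which falls on the call to `a` in `b.rs`.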
6328 let mut definitions = project
6329 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6330 .await
6331 .unwrap();
6332
6333 assert_eq!(definitions.len(), 1);
6334 let definition = definitions.pop().unwrap();
6335 cx.update(|cx| {
6336 let target_buffer = definition.buffer.read(cx);
6337 assert_eq!(
6338 target_buffer
6339 .file()
6340 .unwrap()
6341 .as_local()
6342 .unwrap()
6343 .abs_path(cx),
6344 Path::new("/dir/a.rs"),
6345 );
6346 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6347 assert_eq!(
6348 list_worktrees(&project, cx),
6349 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6350 );
6351
6352 drop(definition);
6353 });
6354 cx.read(|cx| {
6355 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6356 });
6357
6358 fn list_worktrees<'a>(
6359 project: &'a ModelHandle<Project>,
6360 cx: &'a AppContext,
6361 ) -> Vec<(&'a Path, bool)> {
6362 project
6363 .read(cx)
6364 .worktrees(cx)
6365 .map(|worktree| {
6366 let worktree = worktree.read(cx);
6367 (
6368 worktree.as_local().unwrap().abs_path().as_ref(),
6369 worktree.is_visible(),
6370 )
6371 })
6372 .collect::<Vec<_>>()
6373 }
6374 }
6375
6376 #[gpui::test(iterations = 10)]
6377 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6378 let mut language = Language::new(
6379 LanguageConfig {
6380 name: "TypeScript".into(),
6381 path_suffixes: vec!["ts".to_string()],
6382 ..Default::default()
6383 },
6384 None,
6385 );
6386 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6387
6388 let fs = FakeFs::new(cx.background());
6389 fs.insert_tree(
6390 "/dir",
6391 json!({
6392 "a.ts": "a",
6393 }),
6394 )
6395 .await;
6396
6397 let project = Project::test(fs, cx);
6398 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6399
6400 let (tree, _) = project
6401 .update(cx, |project, cx| {
6402 project.find_or_create_local_worktree("/dir", true, cx)
6403 })
6404 .await
6405 .unwrap();
6406 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6407 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6408 .await;
6409
6410 let buffer = project
6411 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6412 .await
6413 .unwrap();
6414
6415 let fake_server = fake_language_servers.next().await.unwrap();
6416
6417 // Language server returns code actions that contain commands, and not edits.
6418 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6419 fake_server
6420 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6421 Ok(Some(vec![
6422 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6423 title: "The code action".into(),
6424 command: Some(lsp::Command {
6425 title: "The command".into(),
6426 command: "_the/command".into(),
6427 arguments: Some(vec![json!("the-argument")]),
6428 }),
6429 ..Default::default()
6430 }),
6431 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6432 title: "two".into(),
6433 ..Default::default()
6434 }),
6435 ]))
6436 })
6437 .next()
6438 .await;
6439
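        // Apply the first code action: the one that carries a command but no edits.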
6440 let action = actions.await.unwrap()[0].clone();
6441 let apply = project.update(cx, |project, cx| {
6442 project.apply_code_action(buffer.clone(), action, true, cx)
6443 });
6444
6445        // Resolving the code action does not populate its edits. In the absence of
6446 // edits, we must execute the given command.
6447 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6448 |action, _| async move { Ok(action) },
6449 );
6450
6451 // While executing the command, the language server sends the editor
6452        // a `workspace/applyEdit` request.
6453 fake_server
6454 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6455 let fake = fake_server.clone();
6456 move |params, _| {
6457 assert_eq!(params.command, "_the/command");
6458 let fake = fake.clone();
6459 async move {
6460 fake.server
6461 .request::<lsp::request::ApplyWorkspaceEdit>(
6462 lsp::ApplyWorkspaceEditParams {
6463 label: None,
6464 edit: lsp::WorkspaceEdit {
6465 changes: Some(
6466 [(
6467 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6468 vec![lsp::TextEdit {
6469 range: lsp::Range::new(
6470 lsp::Position::new(0, 0),
6471 lsp::Position::new(0, 0),
6472 ),
6473 new_text: "X".into(),
6474 }],
6475 )]
6476 .into_iter()
6477 .collect(),
6478 ),
6479 ..Default::default()
6480 },
6481 },
6482 )
6483 .await
6484 .unwrap();
6485 Ok(Some(json!(null)))
6486 }
6487 }
6488 })
6489 .next()
6490 .await;
6491
6492 // Applying the code action returns a project transaction containing the edits
6493        // sent by the language server in its `workspace/applyEdit` request.
6494 let transaction = apply.await.unwrap();
6495 assert!(transaction.0.contains_key(&buffer));
6496 buffer.update(cx, |buffer, cx| {
6497 assert_eq!(buffer.text(), "Xa");
6498 buffer.undo(cx);
6499 assert_eq!(buffer.text(), "a");
6500 });
6501 }
6502
6503 #[gpui::test]
6504 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6505 let fs = FakeFs::new(cx.background());
6506 fs.insert_tree(
6507 "/dir",
6508 json!({
6509 "file1": "the old contents",
6510 }),
6511 )
6512 .await;
6513
6514 let project = Project::test(fs.clone(), cx);
6515 let worktree_id = project
6516 .update(cx, |p, cx| {
6517 p.find_or_create_local_worktree("/dir", true, cx)
6518 })
6519 .await
6520 .unwrap()
6521 .0
6522 .read_with(cx, |tree, _| tree.id());
6523
6524 let buffer = project
6525 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6526 .await
6527 .unwrap();
6528 buffer
6529 .update(cx, |buffer, cx| {
6530 assert_eq!(buffer.text(), "the old contents");
6531 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6532 buffer.save(cx)
6533 })
6534 .await
6535 .unwrap();
6536
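        // After saving, the contents on disk should match the buffer.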
6537 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6538 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6539 }
6540
6541 #[gpui::test]
6542 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6543 let fs = FakeFs::new(cx.background());
6544 fs.insert_tree(
6545 "/dir",
6546 json!({
6547 "file1": "the old contents",
6548 }),
6549 )
6550 .await;
6551
6552 let project = Project::test(fs.clone(), cx);
6553 let worktree_id = project
6554 .update(cx, |p, cx| {
6555 p.find_or_create_local_worktree("/dir/file1", true, cx)
6556 })
6557 .await
6558 .unwrap()
6559 .0
6560 .read_with(cx, |tree, _| tree.id());
6561
6562 let buffer = project
6563 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6564 .await
6565 .unwrap();
6566 buffer
6567 .update(cx, |buffer, cx| {
6568 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6569 buffer.save(cx)
6570 })
6571 .await
6572 .unwrap();
6573
6574 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6575 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6576 }
6577
6578 #[gpui::test]
6579 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6580 let fs = FakeFs::new(cx.background());
6581 fs.insert_tree("/dir", json!({})).await;
6582
6583 let project = Project::test(fs.clone(), cx);
6584 let (worktree, _) = project
6585 .update(cx, |project, cx| {
6586 project.find_or_create_local_worktree("/dir", true, cx)
6587 })
6588 .await
6589 .unwrap();
6590 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6591
6592 let buffer = project.update(cx, |project, cx| {
6593 project.create_buffer("", None, cx).unwrap()
6594 });
6595 buffer.update(cx, |buffer, cx| {
6596 buffer.edit([0..0], "abc", cx);
6597 assert!(buffer.is_dirty());
6598 assert!(!buffer.has_conflict());
6599 });
6600 project
6601 .update(cx, |project, cx| {
6602 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6603 })
6604 .await
6605 .unwrap();
6606 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6607 buffer.read_with(cx, |buffer, cx| {
6608 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6609 assert!(!buffer.is_dirty());
6610 assert!(!buffer.has_conflict());
6611 });
6612
6613 let opened_buffer = project
6614 .update(cx, |project, cx| {
6615 project.open_buffer((worktree_id, "file1"), cx)
6616 })
6617 .await
6618 .unwrap();
6619 assert_eq!(opened_buffer, buffer);
6620 }
6621
6622 #[gpui::test(retries = 5)]
6623 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6624 let dir = temp_tree(json!({
6625 "a": {
6626 "file1": "",
6627 "file2": "",
6628 "file3": "",
6629 },
6630 "b": {
6631 "c": {
6632 "file4": "",
6633 "file5": "",
6634 }
6635 }
6636 }));
6637
6638 let project = Project::test(Arc::new(RealFs), cx);
6639 let rpc = project.read_with(cx, |p, _| p.client.clone());
6640
6641 let (tree, _) = project
6642 .update(cx, |p, cx| {
6643 p.find_or_create_local_worktree(dir.path(), true, cx)
6644 })
6645 .await
6646 .unwrap();
6647 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6648
6649 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6650 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6651 async move { buffer.await.unwrap() }
6652 };
6653 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6654 tree.read_with(cx, |tree, _| {
6655 tree.entry_for_path(path)
6656 .expect(&format!("no entry for path {}", path))
6657 .id
6658 })
6659 };
6660
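        // Open buffers for several of the files before mutating the file system, so we
        // can verify that they follow the renames and deletions below.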
6661 let buffer2 = buffer_for_path("a/file2", cx).await;
6662 let buffer3 = buffer_for_path("a/file3", cx).await;
6663 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6664 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6665
6666 let file2_id = id_for_path("a/file2", &cx);
6667 let file3_id = id_for_path("a/file3", &cx);
6668 let file4_id = id_for_path("b/c/file4", &cx);
6669
6670 // Wait for the initial scan.
6671 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6672 .await;
6673
6674 // Create a remote copy of this worktree.
6675 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6676 let (remote, load_task) = cx.update(|cx| {
6677 Worktree::remote(
6678 1,
6679 1,
6680 initial_snapshot.to_proto(&Default::default(), true),
6681 rpc.clone(),
6682 cx,
6683 )
6684 });
6685 load_task.await;
6686
6687 cx.read(|cx| {
6688 assert!(!buffer2.read(cx).is_dirty());
6689 assert!(!buffer3.read(cx).is_dirty());
6690 assert!(!buffer4.read(cx).is_dirty());
6691 assert!(!buffer5.read(cx).is_dirty());
6692 });
6693
6694 // Rename and delete files and directories.
6695 tree.flush_fs_events(&cx).await;
6696 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6697 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6698 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6699 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6700 tree.flush_fs_events(&cx).await;
6701
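        // These are the paths the worktree should contain after the renames and deletions above.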
6702 let expected_paths = vec![
6703 "a",
6704 "a/file1",
6705 "a/file2.new",
6706 "b",
6707 "d",
6708 "d/file3",
6709 "d/file4",
6710 ];
6711
6712 cx.read(|app| {
6713 assert_eq!(
6714 tree.read(app)
6715 .paths()
6716 .map(|p| p.to_str().unwrap())
6717 .collect::<Vec<_>>(),
6718 expected_paths
6719 );
6720
6721 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6722 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6723 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6724
6725 assert_eq!(
6726 buffer2.read(app).file().unwrap().path().as_ref(),
6727 Path::new("a/file2.new")
6728 );
6729 assert_eq!(
6730 buffer3.read(app).file().unwrap().path().as_ref(),
6731 Path::new("d/file3")
6732 );
6733 assert_eq!(
6734 buffer4.read(app).file().unwrap().path().as_ref(),
6735 Path::new("d/file4")
6736 );
6737 assert_eq!(
6738 buffer5.read(app).file().unwrap().path().as_ref(),
6739 Path::new("b/c/file5")
6740 );
6741
6742 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6743 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6744 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6745 assert!(buffer5.read(app).file().unwrap().is_deleted());
6746 });
6747
6748 // Update the remote worktree. Check that it becomes consistent with the
6749 // local worktree.
6750 remote.update(cx, |remote, cx| {
6751 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6752 &initial_snapshot,
6753 1,
6754 1,
6755 true,
6756 );
6757 remote
6758 .as_remote_mut()
6759 .unwrap()
6760 .snapshot
6761 .apply_remote_update(update_message)
6762 .unwrap();
6763
6764 assert_eq!(
6765 remote
6766 .paths()
6767 .map(|p| p.to_str().unwrap())
6768 .collect::<Vec<_>>(),
6769 expected_paths
6770 );
6771 });
6772 }
6773
6774 #[gpui::test]
6775 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6776 let fs = FakeFs::new(cx.background());
6777 fs.insert_tree(
6778 "/the-dir",
6779 json!({
6780 "a.txt": "a-contents",
6781 "b.txt": "b-contents",
6782 }),
6783 )
6784 .await;
6785
6786 let project = Project::test(fs.clone(), cx);
6787 let worktree_id = project
6788 .update(cx, |p, cx| {
6789 p.find_or_create_local_worktree("/the-dir", true, cx)
6790 })
6791 .await
6792 .unwrap()
6793 .0
6794 .read_with(cx, |tree, _| tree.id());
6795
6796 // Spawn multiple tasks to open paths, repeating some paths.
6797 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6798 (
6799 p.open_buffer((worktree_id, "a.txt"), cx),
6800 p.open_buffer((worktree_id, "b.txt"), cx),
6801 p.open_buffer((worktree_id, "a.txt"), cx),
6802 )
6803 });
6804
6805 let buffer_a_1 = buffer_a_1.await.unwrap();
6806 let buffer_a_2 = buffer_a_2.await.unwrap();
6807 let buffer_b = buffer_b.await.unwrap();
6808 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6809 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6810
6811 // There is only one buffer per path.
6812 let buffer_a_id = buffer_a_1.id();
6813 assert_eq!(buffer_a_2.id(), buffer_a_id);
6814
6815 // Open the same path again while it is still open.
6816 drop(buffer_a_1);
6817 let buffer_a_3 = project
6818 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6819 .await
6820 .unwrap();
6821
6822 // There's still only one buffer per path.
6823 assert_eq!(buffer_a_3.id(), buffer_a_id);
6824 }
6825
6826 #[gpui::test]
6827 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6828 use std::fs;
6829
6830 let dir = temp_tree(json!({
6831 "file1": "abc",
6832 "file2": "def",
6833 "file3": "ghi",
6834 }));
6835
6836 let project = Project::test(Arc::new(RealFs), cx);
6837 let (worktree, _) = project
6838 .update(cx, |p, cx| {
6839 p.find_or_create_local_worktree(dir.path(), true, cx)
6840 })
6841 .await
6842 .unwrap();
6843 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6844
6845 worktree.flush_fs_events(&cx).await;
6846 worktree
6847 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6848 .await;
6849
6850 let buffer1 = project
6851 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6852 .await
6853 .unwrap();
6854 let events = Rc::new(RefCell::new(Vec::new()));
6855
6856 // initially, the buffer isn't dirty.
6857 buffer1.update(cx, |buffer, cx| {
6858 cx.subscribe(&buffer1, {
6859 let events = events.clone();
6860 move |_, _, event, _| match event {
6861 BufferEvent::Operation(_) => {}
6862 _ => events.borrow_mut().push(event.clone()),
6863 }
6864 })
6865 .detach();
6866
6867 assert!(!buffer.is_dirty());
6868 assert!(events.borrow().is_empty());
6869
6870 buffer.edit(vec![1..2], "", cx);
6871 });
6872
6873 // after the first edit, the buffer is dirty, and emits a dirtied event.
6874 buffer1.update(cx, |buffer, cx| {
6875            assert_eq!(buffer.text(), "ac");
6876 assert!(buffer.is_dirty());
6877 assert_eq!(
6878 *events.borrow(),
6879 &[language::Event::Edited, language::Event::Dirtied]
6880 );
6881 events.borrow_mut().clear();
6882 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6883 });
6884
6885 // after saving, the buffer is not dirty, and emits a saved event.
6886 buffer1.update(cx, |buffer, cx| {
6887 assert!(!buffer.is_dirty());
6888 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6889 events.borrow_mut().clear();
6890
6891 buffer.edit(vec![1..1], "B", cx);
6892 buffer.edit(vec![2..2], "D", cx);
6893 });
6894
6895 // after editing again, the buffer is dirty, and emits another dirty event.
6896 buffer1.update(cx, |buffer, cx| {
6897            assert_eq!(buffer.text(), "aBDc");
6898 assert!(buffer.is_dirty());
6899 assert_eq!(
6900 *events.borrow(),
6901 &[
6902 language::Event::Edited,
6903 language::Event::Dirtied,
6904 language::Event::Edited,
6905 ],
6906 );
6907 events.borrow_mut().clear();
6908
6909 // TODO - currently, after restoring the buffer to its
6910            // previously-saved state, the buffer is still considered dirty.
6911 buffer.edit([1..3], "", cx);
6912            assert_eq!(buffer.text(), "ac");
6913 assert!(buffer.is_dirty());
6914 });
6915
6916 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6917
6918 // When a file is deleted, the buffer is considered dirty.
6919 let events = Rc::new(RefCell::new(Vec::new()));
6920 let buffer2 = project
6921 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6922 .await
6923 .unwrap();
6924 buffer2.update(cx, |_, cx| {
6925 cx.subscribe(&buffer2, {
6926 let events = events.clone();
6927 move |_, _, event, _| events.borrow_mut().push(event.clone())
6928 })
6929 .detach();
6930 });
6931
6932 fs::remove_file(dir.path().join("file2")).unwrap();
6933 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6934 assert_eq!(
6935 *events.borrow(),
6936 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6937 );
6938
6939 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6940 let events = Rc::new(RefCell::new(Vec::new()));
6941 let buffer3 = project
6942 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6943 .await
6944 .unwrap();
6945 buffer3.update(cx, |_, cx| {
6946 cx.subscribe(&buffer3, {
6947 let events = events.clone();
6948 move |_, _, event, _| events.borrow_mut().push(event.clone())
6949 })
6950 .detach();
6951 });
6952
6953 worktree.flush_fs_events(&cx).await;
6954 buffer3.update(cx, |buffer, cx| {
6955 buffer.edit(Some(0..0), "x", cx);
6956 });
6957 events.borrow_mut().clear();
6958 fs::remove_file(dir.path().join("file3")).unwrap();
6959 buffer3
6960 .condition(&cx, |_, _| !events.borrow().is_empty())
6961 .await;
6962 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6963 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6964 }
6965
6966 #[gpui::test]
6967 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6968 use std::fs;
6969
6970 let initial_contents = "aaa\nbbbbb\nc\n";
6971 let dir = temp_tree(json!({ "the-file": initial_contents }));
6972
6973 let project = Project::test(Arc::new(RealFs), cx);
6974 let (worktree, _) = project
6975 .update(cx, |p, cx| {
6976 p.find_or_create_local_worktree(dir.path(), true, cx)
6977 })
6978 .await
6979 .unwrap();
6980 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6981
6982 worktree
6983 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6984 .await;
6985
6986 let abs_path = dir.path().join("the-file");
6987 let buffer = project
6988 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6989 .await
6990 .unwrap();
6991
6992 // TODO
6993 // Add a cursor on each row.
6994 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6995 // assert!(!buffer.is_dirty());
6996 // buffer.add_selection_set(
6997 // &(0..3)
6998 // .map(|row| Selection {
6999 // id: row as usize,
7000 // start: Point::new(row, 1),
7001 // end: Point::new(row, 1),
7002 // reversed: false,
7003 // goal: SelectionGoal::None,
7004 // })
7005 // .collect::<Vec<_>>(),
7006 // cx,
7007 // )
7008 // });
7009
7010 // Change the file on disk, adding two new lines of text, and removing
7011 // one line.
7012 buffer.read_with(cx, |buffer, _| {
7013 assert!(!buffer.is_dirty());
7014 assert!(!buffer.has_conflict());
7015 });
7016 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7017 fs::write(&abs_path, new_contents).unwrap();
7018
7019 // Because the buffer was not modified, it is reloaded from disk. Its
7020 // contents are edited according to the diff between the old and new
7021 // file contents.
7022 buffer
7023 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7024 .await;
7025
7026 buffer.update(cx, |buffer, _| {
7027 assert_eq!(buffer.text(), new_contents);
7028 assert!(!buffer.is_dirty());
7029 assert!(!buffer.has_conflict());
7030
7031 // TODO
7032 // let cursor_positions = buffer
7033 // .selection_set(selection_set_id)
7034 // .unwrap()
7035 // .selections::<Point>(&*buffer)
7036 // .map(|selection| {
7037 // assert_eq!(selection.start, selection.end);
7038 // selection.start
7039 // })
7040 // .collect::<Vec<_>>();
7041 // assert_eq!(
7042 // cursor_positions,
7043 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7044 // );
7045 });
7046
7047 // Modify the buffer
7048 buffer.update(cx, |buffer, cx| {
7049 buffer.edit(vec![0..0], " ", cx);
7050 assert!(buffer.is_dirty());
7051 assert!(!buffer.has_conflict());
7052 });
7053
7054 // Change the file on disk again, adding blank lines to the beginning.
7055 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7056
7057 // Because the buffer is modified, it doesn't reload from disk, but is
7058 // marked as having a conflict.
7059 buffer
7060 .condition(&cx, |buffer, _| buffer.has_conflict())
7061 .await;
7062 }
7063
7064 #[gpui::test]
7065 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7066 cx.foreground().forbid_parking();
7067
7068 let fs = FakeFs::new(cx.background());
7069 fs.insert_tree(
7070 "/the-dir",
7071 json!({
7072 "a.rs": "
7073 fn foo(mut v: Vec<usize>) {
7074 for x in &v {
7075 v.push(1);
7076 }
7077 }
7078 "
7079 .unindent(),
7080 }),
7081 )
7082 .await;
7083
7084 let project = Project::test(fs.clone(), cx);
7085 let (worktree, _) = project
7086 .update(cx, |p, cx| {
7087 p.find_or_create_local_worktree("/the-dir", true, cx)
7088 })
7089 .await
7090 .unwrap();
7091 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7092
7093 let buffer = project
7094 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7095 .await
7096 .unwrap();
7097
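        // Build a diagnostics message containing a warning and an error, plus hint
        // diagnostics that cross-reference them via `related_information`. The assertions
        // below expect each hint to be grouped with its primary diagnostic.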
7098 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7099 let message = lsp::PublishDiagnosticsParams {
7100 uri: buffer_uri.clone(),
7101 diagnostics: vec![
7102 lsp::Diagnostic {
7103 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7104 severity: Some(DiagnosticSeverity::WARNING),
7105 message: "error 1".to_string(),
7106 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7107 location: lsp::Location {
7108 uri: buffer_uri.clone(),
7109 range: lsp::Range::new(
7110 lsp::Position::new(1, 8),
7111 lsp::Position::new(1, 9),
7112 ),
7113 },
7114 message: "error 1 hint 1".to_string(),
7115 }]),
7116 ..Default::default()
7117 },
7118 lsp::Diagnostic {
7119 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7120 severity: Some(DiagnosticSeverity::HINT),
7121 message: "error 1 hint 1".to_string(),
7122 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7123 location: lsp::Location {
7124 uri: buffer_uri.clone(),
7125 range: lsp::Range::new(
7126 lsp::Position::new(1, 8),
7127 lsp::Position::new(1, 9),
7128 ),
7129 },
7130 message: "original diagnostic".to_string(),
7131 }]),
7132 ..Default::default()
7133 },
7134 lsp::Diagnostic {
7135 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7136 severity: Some(DiagnosticSeverity::ERROR),
7137 message: "error 2".to_string(),
7138 related_information: Some(vec![
7139 lsp::DiagnosticRelatedInformation {
7140 location: lsp::Location {
7141 uri: buffer_uri.clone(),
7142 range: lsp::Range::new(
7143 lsp::Position::new(1, 13),
7144 lsp::Position::new(1, 15),
7145 ),
7146 },
7147 message: "error 2 hint 1".to_string(),
7148 },
7149 lsp::DiagnosticRelatedInformation {
7150 location: lsp::Location {
7151 uri: buffer_uri.clone(),
7152 range: lsp::Range::new(
7153 lsp::Position::new(1, 13),
7154 lsp::Position::new(1, 15),
7155 ),
7156 },
7157 message: "error 2 hint 2".to_string(),
7158 },
7159 ]),
7160 ..Default::default()
7161 },
7162 lsp::Diagnostic {
7163 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7164 severity: Some(DiagnosticSeverity::HINT),
7165 message: "error 2 hint 1".to_string(),
7166 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7167 location: lsp::Location {
7168 uri: buffer_uri.clone(),
7169 range: lsp::Range::new(
7170 lsp::Position::new(2, 8),
7171 lsp::Position::new(2, 17),
7172 ),
7173 },
7174 message: "original diagnostic".to_string(),
7175 }]),
7176 ..Default::default()
7177 },
7178 lsp::Diagnostic {
7179 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7180 severity: Some(DiagnosticSeverity::HINT),
7181 message: "error 2 hint 2".to_string(),
7182 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7183 location: lsp::Location {
7184 uri: buffer_uri.clone(),
7185 range: lsp::Range::new(
7186 lsp::Position::new(2, 8),
7187 lsp::Position::new(2, 17),
7188 ),
7189 },
7190 message: "original diagnostic".to_string(),
7191 }]),
7192 ..Default::default()
7193 },
7194 ],
7195 version: None,
7196 };
7197
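        // Ingest the diagnostics and verify the grouping: group 0 holds "error 1" and its
        // hint, group 1 holds "error 2" and its hints.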
7198 project
7199 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7200 .unwrap();
7201 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7202
7203 assert_eq!(
7204 buffer
7205 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7206 .collect::<Vec<_>>(),
7207 &[
7208 DiagnosticEntry {
7209 range: Point::new(1, 8)..Point::new(1, 9),
7210 diagnostic: Diagnostic {
7211 severity: DiagnosticSeverity::WARNING,
7212 message: "error 1".to_string(),
7213 group_id: 0,
7214 is_primary: true,
7215 ..Default::default()
7216 }
7217 },
7218 DiagnosticEntry {
7219 range: Point::new(1, 8)..Point::new(1, 9),
7220 diagnostic: Diagnostic {
7221 severity: DiagnosticSeverity::HINT,
7222 message: "error 1 hint 1".to_string(),
7223 group_id: 0,
7224 is_primary: false,
7225 ..Default::default()
7226 }
7227 },
7228 DiagnosticEntry {
7229 range: Point::new(1, 13)..Point::new(1, 15),
7230 diagnostic: Diagnostic {
7231 severity: DiagnosticSeverity::HINT,
7232 message: "error 2 hint 1".to_string(),
7233 group_id: 1,
7234 is_primary: false,
7235 ..Default::default()
7236 }
7237 },
7238 DiagnosticEntry {
7239 range: Point::new(1, 13)..Point::new(1, 15),
7240 diagnostic: Diagnostic {
7241 severity: DiagnosticSeverity::HINT,
7242 message: "error 2 hint 2".to_string(),
7243 group_id: 1,
7244 is_primary: false,
7245 ..Default::default()
7246 }
7247 },
7248 DiagnosticEntry {
7249 range: Point::new(2, 8)..Point::new(2, 17),
7250 diagnostic: Diagnostic {
7251 severity: DiagnosticSeverity::ERROR,
7252 message: "error 2".to_string(),
7253 group_id: 1,
7254 is_primary: true,
7255 ..Default::default()
7256 }
7257 }
7258 ]
7259 );
7260
7261 assert_eq!(
7262 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7263 &[
7264 DiagnosticEntry {
7265 range: Point::new(1, 8)..Point::new(1, 9),
7266 diagnostic: Diagnostic {
7267 severity: DiagnosticSeverity::WARNING,
7268 message: "error 1".to_string(),
7269 group_id: 0,
7270 is_primary: true,
7271 ..Default::default()
7272 }
7273 },
7274 DiagnosticEntry {
7275 range: Point::new(1, 8)..Point::new(1, 9),
7276 diagnostic: Diagnostic {
7277 severity: DiagnosticSeverity::HINT,
7278 message: "error 1 hint 1".to_string(),
7279 group_id: 0,
7280 is_primary: false,
7281 ..Default::default()
7282 }
7283 },
7284 ]
7285 );
7286 assert_eq!(
7287 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7288 &[
7289 DiagnosticEntry {
7290 range: Point::new(1, 13)..Point::new(1, 15),
7291 diagnostic: Diagnostic {
7292 severity: DiagnosticSeverity::HINT,
7293 message: "error 2 hint 1".to_string(),
7294 group_id: 1,
7295 is_primary: false,
7296 ..Default::default()
7297 }
7298 },
7299 DiagnosticEntry {
7300 range: Point::new(1, 13)..Point::new(1, 15),
7301 diagnostic: Diagnostic {
7302 severity: DiagnosticSeverity::HINT,
7303 message: "error 2 hint 2".to_string(),
7304 group_id: 1,
7305 is_primary: false,
7306 ..Default::default()
7307 }
7308 },
7309 DiagnosticEntry {
7310 range: Point::new(2, 8)..Point::new(2, 17),
7311 diagnostic: Diagnostic {
7312 severity: DiagnosticSeverity::ERROR,
7313 message: "error 2".to_string(),
7314 group_id: 1,
7315 is_primary: true,
7316 ..Default::default()
7317 }
7318 }
7319 ]
7320 );
7321 }
7322
7323 #[gpui::test]
7324 async fn test_rename(cx: &mut gpui::TestAppContext) {
7325 cx.foreground().forbid_parking();
7326
7327 let mut language = Language::new(
7328 LanguageConfig {
7329 name: "Rust".into(),
7330 path_suffixes: vec!["rs".to_string()],
7331 ..Default::default()
7332 },
7333 Some(tree_sitter_rust::language()),
7334 );
7335 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7336
7337 let fs = FakeFs::new(cx.background());
7338 fs.insert_tree(
7339 "/dir",
7340 json!({
7341 "one.rs": "const ONE: usize = 1;",
7342 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7343 }),
7344 )
7345 .await;
7346
7347 let project = Project::test(fs.clone(), cx);
7348 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7349
7350 let (tree, _) = project
7351 .update(cx, |project, cx| {
7352 project.find_or_create_local_worktree("/dir", true, cx)
7353 })
7354 .await
7355 .unwrap();
7356 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7357 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7358 .await;
7359
7360 let buffer = project
7361 .update(cx, |project, cx| {
7362 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7363 })
7364 .await
7365 .unwrap();
7366
7367 let fake_server = fake_servers.next().await.unwrap();
7368
7369 let response = project.update(cx, |project, cx| {
7370 project.prepare_rename(buffer.clone(), 7, cx)
7371 });
7372 fake_server
7373 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7374 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7375 assert_eq!(params.position, lsp::Position::new(0, 7));
7376 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7377 lsp::Position::new(0, 6),
7378 lsp::Position::new(0, 9),
7379 ))))
7380 })
7381 .next()
7382 .await
7383 .unwrap();
7384 let range = response.await.unwrap().unwrap();
7385 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7386 assert_eq!(range, 6..9);
7387
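        // Perform the rename. The fake server responds with edits in both `one.rs` and `two.rs`.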
7388 let response = project.update(cx, |project, cx| {
7389 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7390 });
7391 fake_server
7392 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7393 assert_eq!(
7394 params.text_document_position.text_document.uri.as_str(),
7395 "file:///dir/one.rs"
7396 );
7397 assert_eq!(
7398 params.text_document_position.position,
7399 lsp::Position::new(0, 7)
7400 );
7401 assert_eq!(params.new_name, "THREE");
7402 Ok(Some(lsp::WorkspaceEdit {
7403 changes: Some(
7404 [
7405 (
7406 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7407 vec![lsp::TextEdit::new(
7408 lsp::Range::new(
7409 lsp::Position::new(0, 6),
7410 lsp::Position::new(0, 9),
7411 ),
7412 "THREE".to_string(),
7413 )],
7414 ),
7415 (
7416 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7417 vec![
7418 lsp::TextEdit::new(
7419 lsp::Range::new(
7420 lsp::Position::new(0, 24),
7421 lsp::Position::new(0, 27),
7422 ),
7423 "THREE".to_string(),
7424 ),
7425 lsp::TextEdit::new(
7426 lsp::Range::new(
7427 lsp::Position::new(0, 35),
7428 lsp::Position::new(0, 38),
7429 ),
7430 "THREE".to_string(),
7431 ),
7432 ],
7433 ),
7434 ]
7435 .into_iter()
7436 .collect(),
7437 ),
7438 ..Default::default()
7439 }))
7440 })
7441 .next()
7442 .await
7443 .unwrap();
7444 let mut transaction = response.await.unwrap().0;
7445 assert_eq!(transaction.len(), 2);
7446 assert_eq!(
7447 transaction
7448 .remove_entry(&buffer)
7449 .unwrap()
7450 .0
7451 .read_with(cx, |buffer, _| buffer.text()),
7452 "const THREE: usize = 1;"
7453 );
7454 assert_eq!(
7455 transaction
7456 .into_keys()
7457 .next()
7458 .unwrap()
7459 .read_with(cx, |buffer, _| buffer.text()),
7460 "const TWO: usize = one::THREE + one::THREE;"
7461 );
7462 }
7463
7464 #[gpui::test]
7465 async fn test_search(cx: &mut gpui::TestAppContext) {
7466 let fs = FakeFs::new(cx.background());
7467 fs.insert_tree(
7468 "/dir",
7469 json!({
7470 "one.rs": "const ONE: usize = 1;",
7471 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7472 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7473 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7474 }),
7475 )
7476 .await;
7477 let project = Project::test(fs.clone(), cx);
7478 let (tree, _) = project
7479 .update(cx, |project, cx| {
7480 project.find_or_create_local_worktree("/dir", true, cx)
7481 })
7482 .await
7483 .unwrap();
7484 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7485 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7486 .await;
7487
7488 assert_eq!(
7489 search(&project, SearchQuery::text("TWO", false, true), cx)
7490 .await
7491 .unwrap(),
7492 HashMap::from_iter([
7493 ("two.rs".to_string(), vec![6..9]),
7494 ("three.rs".to_string(), vec![37..40])
7495 ])
7496 );
7497
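        // Open one of the files and edit it in memory only. A subsequent search should
        // reflect the unsaved buffer contents.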
7498 let buffer_4 = project
7499 .update(cx, |project, cx| {
7500 project.open_buffer((worktree_id, "four.rs"), cx)
7501 })
7502 .await
7503 .unwrap();
7504 buffer_4.update(cx, |buffer, cx| {
7505 buffer.edit([20..28, 31..43], "two::TWO", cx);
7506 });
7507
7508 assert_eq!(
7509 search(&project, SearchQuery::text("TWO", false, true), cx)
7510 .await
7511 .unwrap(),
7512 HashMap::from_iter([
7513 ("two.rs".to_string(), vec![6..9]),
7514 ("three.rs".to_string(), vec![37..40]),
7515 ("four.rs".to_string(), vec![25..28, 36..39])
7516 ])
7517 );
7518
7519 async fn search(
7520 project: &ModelHandle<Project>,
7521 query: SearchQuery,
7522 cx: &mut gpui::TestAppContext,
7523 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7524 let results = project
7525 .update(cx, |project, cx| project.search(query, cx))
7526 .await?;
7527
7528 Ok(results
7529 .into_iter()
7530 .map(|(buffer, ranges)| {
7531 buffer.read_with(cx, |buffer, _| {
7532 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7533 let ranges = ranges
7534 .into_iter()
7535 .map(|range| range.to_offset(buffer))
7536 .collect::<Vec<_>>();
7537 (path, ranges)
7538 })
7539 })
7540 .collect())
7541 }
7542 }
7543}