1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
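/// A model that can be opened as a project item (such as a buffer) and can
/// report which worktree entry it corresponds to, if any.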
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
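/// The top-level model for a workspace's project state: its worktrees, open
/// buffers, running language servers, and, when shared or remote, the
/// collaborators participating in the project.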
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
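/// How an open buffer is retained: strongly while the project is shared or
/// remote, weakly otherwise, or as a queue of operations received before the
/// buffer itself finished loading.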
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
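/// A worktree is held strongly while it needs to stay alive (e.g. while the
/// project is shared) and weakly otherwise, so unused worktrees can be dropped.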
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
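/// Whether this project is the local, authoritative copy or a remote replica
/// joined through the collaboration server.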
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161 pub info_count: usize,
162 pub hint_count: usize,
163}
164
165#[derive(Debug)]
166pub struct Location {
167 pub buffer: ModelHandle<Buffer>,
168 pub range: Range<language::Anchor>,
169}
170
171#[derive(Debug)]
172pub struct DocumentHighlight {
173 pub range: Range<language::Anchor>,
174 pub kind: DocumentHighlightKind,
175}
176
177#[derive(Clone, Debug)]
178pub struct Symbol {
179 pub source_worktree_id: WorktreeId,
180 pub worktree_id: WorktreeId,
181 pub language_server_name: LanguageServerName,
182 pub path: PathBuf,
183 pub label: CodeLabel,
184 pub name: String,
185 pub kind: lsp::SymbolKind,
186 pub range: Range<PointUtf16>,
187 pub signature: [u8; 32],
188}
189
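/// The buffer transactions produced by a project-wide operation, such as
/// applying a workspace edit, keyed by the buffer they apply to.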
190#[derive(Default)]
191pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
192
193impl DiagnosticSummary {
194 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
195 let mut this = Self {
196 error_count: 0,
197 warning_count: 0,
198 info_count: 0,
199 hint_count: 0,
200 };
201
202 for entry in diagnostics {
203 if entry.diagnostic.is_primary {
204 match entry.diagnostic.severity {
205 DiagnosticSeverity::ERROR => this.error_count += 1,
206 DiagnosticSeverity::WARNING => this.warning_count += 1,
207 DiagnosticSeverity::INFORMATION => this.info_count += 1,
208 DiagnosticSeverity::HINT => this.hint_count += 1,
209 _ => {}
210 }
211 }
212 }
213
214 this
215 }
216
217 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
218 proto::DiagnosticSummary {
219 path: path.to_string_lossy().to_string(),
220 error_count: self.error_count as u32,
221 warning_count: self.warning_count as u32,
222 info_count: self.info_count as u32,
223 hint_count: self.hint_count as u32,
224 }
225 }
226}
227
228#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
229pub struct ProjectEntryId(usize);
230
231impl ProjectEntryId {
232 pub fn new(counter: &AtomicUsize) -> Self {
233 Self(counter.fetch_add(1, SeqCst))
234 }
235
236 pub fn from_proto(id: u64) -> Self {
237 Self(id as usize)
238 }
239
240 pub fn to_proto(&self) -> u64 {
241 self.0 as u64
242 }
243
244 pub fn to_usize(&self) -> usize {
245 self.0
246 }
247}
248
249impl Project {
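    /// Registers this model's handlers for the RPC messages used to collaborate
    /// on projects.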
250 pub fn init(client: &Arc<Client>) {
251 client.add_model_message_handler(Self::handle_add_collaborator);
252 client.add_model_message_handler(Self::handle_buffer_reloaded);
253 client.add_model_message_handler(Self::handle_buffer_saved);
254 client.add_model_message_handler(Self::handle_start_language_server);
255 client.add_model_message_handler(Self::handle_update_language_server);
256 client.add_model_message_handler(Self::handle_remove_collaborator);
257 client.add_model_message_handler(Self::handle_register_worktree);
258 client.add_model_message_handler(Self::handle_unregister_worktree);
259 client.add_model_message_handler(Self::handle_unshare_project);
260 client.add_model_message_handler(Self::handle_update_buffer_file);
261 client.add_model_message_handler(Self::handle_update_buffer);
262 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
263 client.add_model_message_handler(Self::handle_update_worktree);
264 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
265 client.add_model_request_handler(Self::handle_apply_code_action);
266 client.add_model_request_handler(Self::handle_reload_buffers);
267 client.add_model_request_handler(Self::handle_format_buffers);
268 client.add_model_request_handler(Self::handle_get_code_actions);
269 client.add_model_request_handler(Self::handle_get_completions);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
271 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
272 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
273 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
274 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
275 client.add_model_request_handler(Self::handle_search_project);
276 client.add_model_request_handler(Self::handle_get_project_symbols);
277 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
278 client.add_model_request_handler(Self::handle_open_buffer_by_id);
279 client.add_model_request_handler(Self::handle_open_buffer_by_path);
280 client.add_model_request_handler(Self::handle_save_buffer);
281 }
282
283 pub fn local(
284 client: Arc<Client>,
285 user_store: ModelHandle<UserStore>,
286 languages: Arc<LanguageRegistry>,
287 fs: Arc<dyn Fs>,
288 cx: &mut MutableAppContext,
289 ) -> ModelHandle<Self> {
290 cx.add_model(|cx: &mut ModelContext<Self>| {
291 let (remote_id_tx, remote_id_rx) = watch::channel();
292 let _maintain_remote_id_task = cx.spawn_weak({
293 let rpc = client.clone();
294 move |this, mut cx| {
295 async move {
296 let mut status = rpc.status();
297 while let Some(status) = status.next().await {
298 if let Some(this) = this.upgrade(&cx) {
299 if status.is_connected() {
300 this.update(&mut cx, |this, cx| this.register(cx)).await?;
301 } else {
302 this.update(&mut cx, |this, cx| this.unregister(cx));
303 }
304 }
305 }
306 Ok(())
307 }
308 .log_err()
309 }
310 });
311
312 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
313 Self {
314 worktrees: Default::default(),
315 collaborators: Default::default(),
316 opened_buffers: Default::default(),
317 shared_buffers: Default::default(),
318 loading_buffers: Default::default(),
319 loading_local_worktrees: Default::default(),
320 buffer_snapshots: Default::default(),
321 client_state: ProjectClientState::Local {
322 is_shared: false,
323 remote_id_tx,
324 remote_id_rx,
325 _maintain_remote_id_task,
326 },
327 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
328 subscriptions: Vec::new(),
329 active_entry: None,
330 languages,
331 client,
332 user_store,
333 fs,
334 next_entry_id: Default::default(),
335 language_servers: Default::default(),
336 started_language_servers: Default::default(),
337 language_server_statuses: Default::default(),
338 last_workspace_edits_by_language_server: Default::default(),
339 language_server_settings: Default::default(),
340 next_language_server_id: 0,
341 nonce: StdRng::from_entropy().gen(),
342 }
343 })
344 }
345
346 pub async fn remote(
347 remote_id: u64,
348 client: Arc<Client>,
349 user_store: ModelHandle<UserStore>,
350 languages: Arc<LanguageRegistry>,
351 fs: Arc<dyn Fs>,
352 cx: &mut AsyncAppContext,
353 ) -> Result<ModelHandle<Self>> {
354 client.authenticate_and_connect(true, &cx).await?;
355
356 let response = client
357 .request(proto::JoinProject {
358 project_id: remote_id,
359 })
360 .await?;
361
362 let replica_id = response.replica_id as ReplicaId;
363
364 let mut worktrees = Vec::new();
365 for worktree in response.worktrees {
366 let (worktree, load_task) = cx
367 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
368 worktrees.push(worktree);
369 load_task.detach();
370 }
371
372 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
373 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
374 let mut this = Self {
375 worktrees: Vec::new(),
376 loading_buffers: Default::default(),
377 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
378 shared_buffers: Default::default(),
379 loading_local_worktrees: Default::default(),
380 active_entry: None,
381 collaborators: Default::default(),
382 languages,
383 user_store: user_store.clone(),
384 fs,
385 next_entry_id: Default::default(),
386 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
387 client: client.clone(),
388 client_state: ProjectClientState::Remote {
389 sharing_has_stopped: false,
390 remote_id,
391 replica_id,
392 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
393 async move {
394 let mut status = client.status();
395 let is_connected =
396 status.next().await.map_or(false, |s| s.is_connected());
397 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
398 if !is_connected || status.next().await.is_some() {
399 if let Some(this) = this.upgrade(&cx) {
400 this.update(&mut cx, |this, cx| this.project_unshared(cx))
401 }
402 }
403 Ok(())
404 }
405 .log_err()
406 }),
407 },
408 language_servers: Default::default(),
409 started_language_servers: Default::default(),
410 language_server_settings: Default::default(),
411 language_server_statuses: response
412 .language_servers
413 .into_iter()
414 .map(|server| {
415 (
416 server.id as usize,
417 LanguageServerStatus {
418 name: server.name,
419 pending_work: Default::default(),
420 pending_diagnostic_updates: 0,
421 },
422 )
423 })
424 .collect(),
425 last_workspace_edits_by_language_server: Default::default(),
426 next_language_server_id: 0,
427 opened_buffers: Default::default(),
428 buffer_snapshots: Default::default(),
429 nonce: StdRng::from_entropy().gen(),
430 };
431 for worktree in worktrees {
432 this.add_worktree(&worktree, cx);
433 }
434 this
435 });
436
437 let user_ids = response
438 .collaborators
439 .iter()
440 .map(|peer| peer.user_id)
441 .collect();
442 user_store
443 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
444 .await?;
445 let mut collaborators = HashMap::default();
446 for message in response.collaborators {
447 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
448 collaborators.insert(collaborator.peer_id, collaborator);
449 }
450
451 this.update(cx, |this, _| {
452 this.collaborators = collaborators;
453 });
454
455 Ok(this)
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
465 }
466
467 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
468 self.opened_buffers
469 .get(&remote_id)
470 .and_then(|buffer| buffer.upgrade(cx))
471 }
472
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
535 self.unshare(cx);
536 for worktree in &self.worktrees {
537 if let Some(worktree) = worktree.upgrade(cx) {
538 worktree.update(cx, |worktree, _| {
539 worktree.as_local_mut().unwrap().unregister();
540 });
541 }
542 }
543
544 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
545 *remote_id_tx.borrow_mut() = None;
546 }
547
548 self.subscriptions.clear();
549 }
550
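    /// Registers this local project with the server, storing the assigned remote
    /// id and registering each local worktree under it.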
551 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
552 self.unregister(cx);
553
554 let response = self.client.request(proto::RegisterProject {});
555 cx.spawn(|this, mut cx| async move {
556 let remote_id = response.await?.project_id;
557
558 let mut registrations = Vec::new();
559 this.update(&mut cx, |this, cx| {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
561 *remote_id_tx.borrow_mut() = Some(remote_id);
562 }
563
564 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
565
566 this.subscriptions
567 .push(this.client.add_model_for_remote_entity(remote_id, cx));
568
569 for worktree in &this.worktrees {
570 if let Some(worktree) = worktree.upgrade(cx) {
571 registrations.push(worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.register(remote_id, cx)
574 }));
575 }
576 }
577 });
578
579 futures::future::try_join_all(registrations).await?;
580 Ok(())
581 })
582 }
583
584 pub fn remote_id(&self) -> Option<u64> {
585 match &self.client_state {
586 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
587 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
588 }
589 }
590
591 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
592 let mut id = None;
593 let mut watch = None;
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
596 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
597 }
598
599 async move {
600 if let Some(id) = id {
601 return id;
602 }
603 let mut watch = watch.unwrap();
604 loop {
605 let id = *watch.borrow();
606 if let Some(id) = id {
607 return id;
608 }
609 watch.next().await;
610 }
611 }
612 }
613
614 pub fn replica_id(&self) -> ReplicaId {
615 match &self.client_state {
616 ProjectClientState::Local { .. } => 0,
617 ProjectClientState::Remote { replica_id, .. } => *replica_id,
618 }
619 }
620
621 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
622 &self.collaborators
623 }
624
625 pub fn worktrees<'a>(
626 &'a self,
627 cx: &'a AppContext,
628 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
629 self.worktrees
630 .iter()
631 .filter_map(move |worktree| worktree.upgrade(cx))
632 }
633
634 pub fn visible_worktrees<'a>(
635 &'a self,
636 cx: &'a AppContext,
637 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
638 self.worktrees.iter().filter_map(|worktree| {
639 worktree.upgrade(cx).and_then(|worktree| {
640 if worktree.read(cx).is_visible() {
641 Some(worktree)
642 } else {
643 None
644 }
645 })
646 })
647 }
648
649 pub fn worktree_for_id(
650 &self,
651 id: WorktreeId,
652 cx: &AppContext,
653 ) -> Option<ModelHandle<Worktree>> {
654 self.worktrees(cx)
655 .find(|worktree| worktree.read(cx).id() == id)
656 }
657
658 pub fn worktree_for_entry(
659 &self,
660 entry_id: ProjectEntryId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
665 }
666
667 pub fn worktree_id_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<WorktreeId> {
672 self.worktree_for_entry(entry_id, cx)
673 .map(|worktree| worktree.read(cx).id())
674 }
675
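    /// Shares this local project with collaborators: open buffers and worktrees
    /// are upgraded to strong handles so they stay alive for guests, and each
    /// worktree's state is then sent to the server.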
676 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
677 let rpc = self.client.clone();
678 cx.spawn(|this, mut cx| async move {
679 let project_id = this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local {
681 is_shared,
682 remote_id_rx,
683 ..
684 } = &mut this.client_state
685 {
686 *is_shared = true;
687
688 for open_buffer in this.opened_buffers.values_mut() {
689 match open_buffer {
690 OpenBuffer::Strong(_) => {}
691 OpenBuffer::Weak(buffer) => {
692 if let Some(buffer) = buffer.upgrade(cx) {
693 *open_buffer = OpenBuffer::Strong(buffer);
694 }
695 }
696 OpenBuffer::Loading(_) => unreachable!(),
697 }
698 }
699
700 for worktree_handle in this.worktrees.iter_mut() {
701 match worktree_handle {
702 WorktreeHandle::Strong(_) => {}
703 WorktreeHandle::Weak(worktree) => {
704 if let Some(worktree) = worktree.upgrade(cx) {
705 *worktree_handle = WorktreeHandle::Strong(worktree);
706 }
707 }
708 }
709 }
710
711 remote_id_rx
712 .borrow()
713 .ok_or_else(|| anyhow!("no project id"))
714 } else {
715 Err(anyhow!("can't share a remote project"))
716 }
717 })?;
718
719 rpc.request(proto::ShareProject { project_id }).await?;
720
721 let mut tasks = Vec::new();
722 this.update(&mut cx, |this, cx| {
723 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
724 worktree.update(cx, |worktree, cx| {
725 let worktree = worktree.as_local_mut().unwrap();
726 tasks.push(worktree.share(project_id, cx));
727 });
728 }
729 });
730 for task in tasks {
731 task.await?;
732 }
733 this.update(&mut cx, |_, cx| cx.notify());
734 Ok(())
735 })
736 }
737
738 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
739 let rpc = self.client.clone();
740
741 if let ProjectClientState::Local {
742 is_shared,
743 remote_id_rx,
744 ..
745 } = &mut self.client_state
746 {
747 if !*is_shared {
748 return;
749 }
750
751 *is_shared = false;
752 self.collaborators.clear();
753 self.shared_buffers.clear();
754 for worktree_handle in self.worktrees.iter_mut() {
755 if let WorktreeHandle::Strong(worktree) = worktree_handle {
756 let is_visible = worktree.update(cx, |worktree, _| {
757 worktree.as_local_mut().unwrap().unshare();
758 worktree.is_visible()
759 });
760 if !is_visible {
761 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
762 }
763 }
764 }
765
766 for open_buffer in self.opened_buffers.values_mut() {
767 match open_buffer {
768 OpenBuffer::Strong(buffer) => {
769 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
770 }
771 _ => {}
772 }
773 }
774
775 if let Some(project_id) = *remote_id_rx.borrow() {
776 rpc.send(proto::UnshareProject { project_id }).log_err();
777 }
778
779 cx.notify();
780 } else {
781 log::error!("attempted to unshare a remote project");
782 }
783 }
784
785 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
786 if let ProjectClientState::Remote {
787 sharing_has_stopped,
788 ..
789 } = &mut self.client_state
790 {
791 *sharing_has_stopped = true;
792 self.collaborators.clear();
793 cx.notify();
794 }
795 }
796
797 pub fn is_read_only(&self) -> bool {
798 match &self.client_state {
799 ProjectClientState::Local { .. } => false,
800 ProjectClientState::Remote {
801 sharing_has_stopped,
802 ..
803 } => *sharing_has_stopped,
804 }
805 }
806
807 pub fn is_local(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => true,
810 ProjectClientState::Remote { .. } => false,
811 }
812 }
813
814 pub fn is_remote(&self) -> bool {
815 !self.is_local()
816 }
817
818 pub fn create_buffer(
819 &mut self,
820 text: &str,
821 language: Option<Arc<Language>>,
822 cx: &mut ModelContext<Self>,
823 ) -> Result<ModelHandle<Buffer>> {
824 if self.is_remote() {
825 return Err(anyhow!("creating buffers as a guest is not supported yet"));
826 }
827
828 let buffer = cx.add_model(|cx| {
829 Buffer::new(self.replica_id(), text, cx)
830 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
831 });
832 self.register_buffer(&buffer, cx)?;
833 Ok(buffer)
834 }
835
836 pub fn open_path(
837 &mut self,
838 path: impl Into<ProjectPath>,
839 cx: &mut ModelContext<Self>,
840 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
841 let task = self.open_buffer(path, cx);
842 cx.spawn_weak(|_, cx| async move {
843 let buffer = task.await?;
844 let project_entry_id = buffer
845 .read_with(&cx, |buffer, cx| {
846 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
847 })
848 .ok_or_else(|| anyhow!("no project entry"))?;
849 Ok((project_entry_id, buffer.into()))
850 })
851 }
852
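    /// Opens the buffer for the given path, reusing an already-open buffer or an
    /// in-flight load when possible; otherwise the buffer is loaded from the
    /// local worktree or requested from the remote host.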
853 pub fn open_buffer(
854 &mut self,
855 path: impl Into<ProjectPath>,
856 cx: &mut ModelContext<Self>,
857 ) -> Task<Result<ModelHandle<Buffer>>> {
858 let project_path = path.into();
859 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
860 worktree
861 } else {
862 return Task::ready(Err(anyhow!("no such worktree")));
863 };
864
865 // If there is already a buffer for the given path, then return it.
866 let existing_buffer = self.get_open_buffer(&project_path, cx);
867 if let Some(existing_buffer) = existing_buffer {
868 return Task::ready(Ok(existing_buffer));
869 }
870
871 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
872 // If the given path is already being loaded, then wait for that existing
873 // task to complete and return the same buffer.
874 hash_map::Entry::Occupied(e) => e.get().clone(),
875
876 // Otherwise, record the fact that this path is now being loaded.
877 hash_map::Entry::Vacant(entry) => {
878 let (mut tx, rx) = postage::watch::channel();
879 entry.insert(rx.clone());
880
881 let load_buffer = if worktree.read(cx).is_local() {
882 self.open_local_buffer(&project_path.path, &worktree, cx)
883 } else {
884 self.open_remote_buffer(&project_path.path, &worktree, cx)
885 };
886
887 cx.spawn(move |this, mut cx| async move {
888 let load_result = load_buffer.await;
889 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
890 // Record the fact that the buffer is no longer loading.
891 this.loading_buffers.remove(&project_path);
892 let buffer = load_result.map_err(Arc::new)?;
893 Ok(buffer)
894 }));
895 })
896 .detach();
897 rx
898 }
899 };
900
901 cx.foreground().spawn(async move {
902 loop {
903 if let Some(result) = loading_watch.borrow().as_ref() {
904 match result {
905 Ok(buffer) => return Ok(buffer.clone()),
906 Err(error) => return Err(anyhow!("{}", error)),
907 }
908 }
909 loading_watch.next().await;
910 }
911 })
912 }
913
914 fn open_local_buffer(
915 &mut self,
916 path: &Arc<Path>,
917 worktree: &ModelHandle<Worktree>,
918 cx: &mut ModelContext<Self>,
919 ) -> Task<Result<ModelHandle<Buffer>>> {
920 let load_buffer = worktree.update(cx, |worktree, cx| {
921 let worktree = worktree.as_local_mut().unwrap();
922 worktree.load_buffer(path, cx)
923 });
924 cx.spawn(|this, mut cx| async move {
925 let buffer = load_buffer.await?;
926 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
927 Ok(buffer)
928 })
929 }
930
931 fn open_remote_buffer(
932 &mut self,
933 path: &Arc<Path>,
934 worktree: &ModelHandle<Worktree>,
935 cx: &mut ModelContext<Self>,
936 ) -> Task<Result<ModelHandle<Buffer>>> {
937 let rpc = self.client.clone();
938 let project_id = self.remote_id().unwrap();
939 let remote_worktree_id = worktree.read(cx).id();
940 let path = path.clone();
941 let path_string = path.to_string_lossy().to_string();
942 cx.spawn(|this, mut cx| async move {
943 let response = rpc
944 .request(proto::OpenBufferByPath {
945 project_id,
946 worktree_id: remote_worktree_id.to_proto(),
947 path: path_string,
948 })
949 .await?;
950 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
951 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
952 .await
953 })
954 }
955
956 fn open_local_buffer_via_lsp(
957 &mut self,
958 abs_path: lsp::Url,
959 lsp_adapter: Arc<dyn LspAdapter>,
960 lsp_server: Arc<LanguageServer>,
961 cx: &mut ModelContext<Self>,
962 ) -> Task<Result<ModelHandle<Buffer>>> {
963 cx.spawn(|this, mut cx| async move {
964 let abs_path = abs_path
965 .to_file_path()
966 .map_err(|_| anyhow!("can't convert URI to path"))?;
967 let (worktree, relative_path) = if let Some(result) =
968 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
969 {
970 result
971 } else {
972 let worktree = this
973 .update(&mut cx, |this, cx| {
974 this.create_local_worktree(&abs_path, false, cx)
975 })
976 .await?;
977 this.update(&mut cx, |this, cx| {
978 this.language_servers.insert(
979 (worktree.read(cx).id(), lsp_adapter.name()),
980 (lsp_adapter, lsp_server),
981 );
982 });
983 (worktree, PathBuf::new())
984 };
985
986 let project_path = ProjectPath {
987 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
988 path: relative_path.into(),
989 };
990 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
991 .await
992 })
993 }
994
995 pub fn open_buffer_by_id(
996 &mut self,
997 id: u64,
998 cx: &mut ModelContext<Self>,
999 ) -> Task<Result<ModelHandle<Buffer>>> {
1000 if let Some(buffer) = self.buffer_for_id(id, cx) {
1001 Task::ready(Ok(buffer))
1002 } else if self.is_local() {
1003 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1004 } else if let Some(project_id) = self.remote_id() {
1005 let request = self
1006 .client
1007 .request(proto::OpenBufferById { project_id, id });
1008 cx.spawn(|this, mut cx| async move {
1009 let buffer = request
1010 .await?
1011 .buffer
1012 .ok_or_else(|| anyhow!("invalid buffer"))?;
1013 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1014 .await
1015 })
1016 } else {
1017 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1018 }
1019 }
1020
1021 pub fn save_buffer_as(
1022 &mut self,
1023 buffer: ModelHandle<Buffer>,
1024 abs_path: PathBuf,
1025 cx: &mut ModelContext<Project>,
1026 ) -> Task<Result<()>> {
1027 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1028 let old_path =
1029 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1030 cx.spawn(|this, mut cx| async move {
1031 if let Some(old_path) = old_path {
1032 this.update(&mut cx, |this, cx| {
1033 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1034 });
1035 }
1036 let (worktree, path) = worktree_task.await?;
1037 worktree
1038 .update(&mut cx, |worktree, cx| {
1039 worktree
1040 .as_local_mut()
1041 .unwrap()
1042 .save_buffer_as(buffer.clone(), path, cx)
1043 })
1044 .await?;
1045 this.update(&mut cx, |this, cx| {
1046 this.assign_language_to_buffer(&buffer, cx);
1047 this.register_buffer_with_language_server(&buffer, cx);
1048 });
1049 Ok(())
1050 })
1051 }
1052
1053 pub fn get_open_buffer(
1054 &mut self,
1055 path: &ProjectPath,
1056 cx: &mut ModelContext<Self>,
1057 ) -> Option<ModelHandle<Buffer>> {
1058 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1059 self.opened_buffers.values().find_map(|buffer| {
1060 let buffer = buffer.upgrade(cx)?;
1061 let file = File::from_dyn(buffer.read(cx).file())?;
1062 if file.worktree == worktree && file.path() == &path.path {
1063 Some(buffer)
1064 } else {
1065 None
1066 }
1067 })
1068 }
1069
1070 fn register_buffer(
1071 &mut self,
1072 buffer: &ModelHandle<Buffer>,
1073 cx: &mut ModelContext<Self>,
1074 ) -> Result<()> {
1075 let remote_id = buffer.read(cx).remote_id();
1076 let open_buffer = if self.is_remote() || self.is_shared() {
1077 OpenBuffer::Strong(buffer.clone())
1078 } else {
1079 OpenBuffer::Weak(buffer.downgrade())
1080 };
1081
1082 match self.opened_buffers.insert(remote_id, open_buffer) {
1083 None => {}
1084 Some(OpenBuffer::Loading(operations)) => {
1085 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1086 }
1087 Some(OpenBuffer::Weak(existing_handle)) => {
1088 if existing_handle.upgrade(cx).is_some() {
1089 Err(anyhow!(
1090 "already registered buffer with remote id {}",
1091 remote_id
1092 ))?
1093 }
1094 }
1095 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1096 "already registered buffer with remote id {}",
1097 remote_id
1098 ))?,
1099 }
1100 cx.subscribe(buffer, |this, buffer, event, cx| {
1101 this.on_buffer_event(buffer, event, cx);
1102 })
1103 .detach();
1104
1105 self.assign_language_to_buffer(buffer, cx);
1106 self.register_buffer_with_language_server(buffer, cx);
1107 cx.observe_release(buffer, |this, buffer, cx| {
1108 if let Some(file) = File::from_dyn(buffer.file()) {
1109 if file.is_local() {
1110 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1111 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1112 server
1113 .notify::<lsp::notification::DidCloseTextDocument>(
1114 lsp::DidCloseTextDocumentParams {
1115 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1116 },
1117 )
1118 .log_err();
1119 }
1120 }
1121 }
1122 })
1123 .detach();
1124
1125 Ok(())
1126 }
1127
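    /// Notifies the buffer's language server that the file is open, applies any
    /// diagnostics already known for its path, and records the initial snapshot
    /// used to compute incremental `didChange` updates.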
1128 fn register_buffer_with_language_server(
1129 &mut self,
1130 buffer_handle: &ModelHandle<Buffer>,
1131 cx: &mut ModelContext<Self>,
1132 ) {
1133 let buffer = buffer_handle.read(cx);
1134 let buffer_id = buffer.remote_id();
1135 if let Some(file) = File::from_dyn(buffer.file()) {
1136 if file.is_local() {
1137 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1138 let initial_snapshot = buffer.text_snapshot();
1139 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1140
1141 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1142 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1143 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1144 .log_err();
1145 }
1146 }
1147
1148 if let Some((_, server)) = language_server {
1149 server
1150 .notify::<lsp::notification::DidOpenTextDocument>(
1151 lsp::DidOpenTextDocumentParams {
1152 text_document: lsp::TextDocumentItem::new(
1153 uri,
1154 Default::default(),
1155 0,
1156 initial_snapshot.text(),
1157 ),
1158 }
1159 .clone(),
1160 )
1161 .log_err();
1162 buffer_handle.update(cx, |buffer, cx| {
1163 buffer.set_completion_triggers(
1164 server
1165 .capabilities()
1166 .completion_provider
1167 .as_ref()
1168 .and_then(|provider| provider.trigger_characters.clone())
1169 .unwrap_or(Vec::new()),
1170 cx,
1171 )
1172 });
1173 self.buffer_snapshots
1174 .insert(buffer_id, vec![(0, initial_snapshot)]);
1175 }
1176 }
1177 }
1178 }
1179
1180 fn unregister_buffer_from_language_server(
1181 &mut self,
1182 buffer: &ModelHandle<Buffer>,
1183 old_path: PathBuf,
1184 cx: &mut ModelContext<Self>,
1185 ) {
1186 buffer.update(cx, |buffer, cx| {
1187 buffer.update_diagnostics(Default::default(), cx);
1188 self.buffer_snapshots.remove(&buffer.remote_id());
1189 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1190 language_server
1191 .notify::<lsp::notification::DidCloseTextDocument>(
1192 lsp::DidCloseTextDocumentParams {
1193 text_document: lsp::TextDocumentIdentifier::new(
1194 lsp::Url::from_file_path(old_path).unwrap(),
1195 ),
1196 },
1197 )
1198 .log_err();
1199 }
1200 });
1201 }
1202
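    /// Reacts to buffer events: operations are forwarded to collaborators, while
    /// edits and saves are reported to the buffer's language servers as
    /// `didChange` and `didSave` notifications.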
1203 fn on_buffer_event(
1204 &mut self,
1205 buffer: ModelHandle<Buffer>,
1206 event: &BufferEvent,
1207 cx: &mut ModelContext<Self>,
1208 ) -> Option<()> {
1209 match event {
1210 BufferEvent::Operation(operation) => {
1211 let project_id = self.remote_id()?;
1212 let request = self.client.request(proto::UpdateBuffer {
1213 project_id,
1214 buffer_id: buffer.read(cx).remote_id(),
1215 operations: vec![language::proto::serialize_operation(&operation)],
1216 });
1217 cx.background().spawn(request).detach_and_log_err(cx);
1218 }
1219 BufferEvent::Edited { .. } => {
1220 let (_, language_server) = self
1221 .language_server_for_buffer(buffer.read(cx), cx)?
1222 .clone();
1223 let buffer = buffer.read(cx);
1224 let file = File::from_dyn(buffer.file())?;
1225 let abs_path = file.as_local()?.abs_path(cx);
1226 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1227 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1228 let (version, prev_snapshot) = buffer_snapshots.last()?;
1229 let next_snapshot = buffer.text_snapshot();
1230 let next_version = version + 1;
1231
1232 let content_changes = buffer
1233 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1234 .map(|edit| {
1235 let edit_start = edit.new.start.0;
1236 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1237 let new_text = next_snapshot
1238 .text_for_range(edit.new.start.1..edit.new.end.1)
1239 .collect();
1240 lsp::TextDocumentContentChangeEvent {
1241 range: Some(lsp::Range::new(
1242 point_to_lsp(edit_start),
1243 point_to_lsp(edit_end),
1244 )),
1245 range_length: None,
1246 text: new_text,
1247 }
1248 })
1249 .collect();
1250
1251 buffer_snapshots.push((next_version, next_snapshot));
1252
1253 language_server
1254 .notify::<lsp::notification::DidChangeTextDocument>(
1255 lsp::DidChangeTextDocumentParams {
1256 text_document: lsp::VersionedTextDocumentIdentifier::new(
1257 uri,
1258 next_version,
1259 ),
1260 content_changes,
1261 },
1262 )
1263 .log_err();
1264 }
1265 BufferEvent::Saved => {
1266 let file = File::from_dyn(buffer.read(cx).file())?;
1267 let worktree_id = file.worktree_id(cx);
1268 let abs_path = file.as_local()?.abs_path(cx);
1269 let text_document = lsp::TextDocumentIdentifier {
1270 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1271 };
1272
1273 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1274 server
1275 .notify::<lsp::notification::DidSaveTextDocument>(
1276 lsp::DidSaveTextDocumentParams {
1277 text_document: text_document.clone(),
1278 text: None,
1279 },
1280 )
1281 .log_err();
1282 }
1283 }
1284 _ => {}
1285 }
1286
1287 None
1288 }
1289
1290 fn language_servers_for_worktree(
1291 &self,
1292 worktree_id: WorktreeId,
1293 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1294 self.language_servers.iter().filter_map(
1295 move |((language_server_worktree_id, _), server)| {
1296 if *language_server_worktree_id == worktree_id {
1297 Some(server)
1298 } else {
1299 None
1300 }
1301 },
1302 )
1303 }
1304
1305 fn assign_language_to_buffer(
1306 &mut self,
1307 buffer: &ModelHandle<Buffer>,
1308 cx: &mut ModelContext<Self>,
1309 ) -> Option<()> {
1310 // If the buffer has a language, set it and start the language server if we haven't already.
1311 let full_path = buffer.read(cx).file()?.full_path(cx);
1312 let language = self.languages.select_language(&full_path)?;
1313 buffer.update(cx, |buffer, cx| {
1314 buffer.set_language(Some(language.clone()), cx);
1315 });
1316
1317 let file = File::from_dyn(buffer.read(cx).file())?;
1318 let worktree = file.worktree.read(cx).as_local()?;
1319 let worktree_id = worktree.id();
1320 let worktree_abs_path = worktree.abs_path().clone();
1321 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1322
1323 None
1324 }
1325
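    /// Starts (or reuses) the language server for the given worktree and
    /// language, wires up its notifications and requests, and tells it about
    /// every already-open buffer it should manage.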
1326 fn start_language_server(
1327 &mut self,
1328 worktree_id: WorktreeId,
1329 worktree_path: Arc<Path>,
1330 language: Arc<Language>,
1331 cx: &mut ModelContext<Self>,
1332 ) {
1333 let adapter = if let Some(adapter) = language.lsp_adapter() {
1334 adapter
1335 } else {
1336 return;
1337 };
1338 let key = (worktree_id, adapter.name());
1339 self.started_language_servers
1340 .entry(key.clone())
1341 .or_insert_with(|| {
1342 let server_id = post_inc(&mut self.next_language_server_id);
1343 let language_server = self.languages.start_language_server(
1344 server_id,
1345 language.clone(),
1346 worktree_path,
1347 self.client.http_client(),
1348 cx,
1349 );
1350 cx.spawn_weak(|this, mut cx| async move {
1351 let language_server = language_server?.await.log_err()?;
1352 let language_server = language_server
1353 .initialize(adapter.initialization_options())
1354 .await
1355 .log_err()?;
1356 let this = this.upgrade(&cx)?;
1357 let disk_based_diagnostics_progress_token =
1358 adapter.disk_based_diagnostics_progress_token();
1359
1360 language_server
1361 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1362 let this = this.downgrade();
1363 let adapter = adapter.clone();
1364 move |params, mut cx| {
1365 if let Some(this) = this.upgrade(&cx) {
1366 this.update(&mut cx, |this, cx| {
1367 this.on_lsp_diagnostics_published(
1368 server_id,
1369 params,
1370 &adapter,
1371 disk_based_diagnostics_progress_token,
1372 cx,
1373 );
1374 });
1375 }
1376 }
1377 })
1378 .detach();
1379
1380 language_server
1381 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1382 let settings = this
1383 .read_with(&cx, |this, _| this.language_server_settings.clone());
1384 move |params, _| {
1385 let settings = settings.lock().clone();
1386 async move {
1387 Ok(params
1388 .items
1389 .into_iter()
1390 .map(|item| {
1391 if let Some(section) = &item.section {
1392 settings
1393 .get(section)
1394 .cloned()
1395 .unwrap_or(serde_json::Value::Null)
1396 } else {
1397 settings.clone()
1398 }
1399 })
1400 .collect())
1401 }
1402 }
1403 })
1404 .detach();
1405
1406 language_server
1407 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1408 let this = this.downgrade();
1409 let adapter = adapter.clone();
1410 let language_server = language_server.clone();
1411 move |params, cx| {
1412 Self::on_lsp_workspace_edit(
1413 this,
1414 params,
1415 server_id,
1416 adapter.clone(),
1417 language_server.clone(),
1418 cx,
1419 )
1420 }
1421 })
1422 .detach();
1423
1424 language_server
1425 .on_notification::<lsp::notification::Progress, _>({
1426 let this = this.downgrade();
1427 move |params, mut cx| {
1428 if let Some(this) = this.upgrade(&cx) {
1429 this.update(&mut cx, |this, cx| {
1430 this.on_lsp_progress(
1431 params,
1432 server_id,
1433 disk_based_diagnostics_progress_token,
1434 cx,
1435 );
1436 });
1437 }
1438 }
1439 })
1440 .detach();
1441
1442 this.update(&mut cx, |this, cx| {
1443 this.language_servers
1444 .insert(key.clone(), (adapter, language_server.clone()));
1445 this.language_server_statuses.insert(
1446 server_id,
1447 LanguageServerStatus {
1448 name: language_server.name().to_string(),
1449 pending_work: Default::default(),
1450 pending_diagnostic_updates: 0,
1451 },
1452 );
1453 language_server
1454 .notify::<lsp::notification::DidChangeConfiguration>(
1455 lsp::DidChangeConfigurationParams {
1456 settings: this.language_server_settings.lock().clone(),
1457 },
1458 )
1459 .ok();
1460
1461 if let Some(project_id) = this.remote_id() {
1462 this.client
1463 .send(proto::StartLanguageServer {
1464 project_id,
1465 server: Some(proto::LanguageServer {
1466 id: server_id as u64,
1467 name: language_server.name().to_string(),
1468 }),
1469 })
1470 .log_err();
1471 }
1472
1473 // Tell the language server about every open buffer in the worktree that matches the language.
1474 for buffer in this.opened_buffers.values() {
1475 if let Some(buffer_handle) = buffer.upgrade(cx) {
1476 let buffer = buffer_handle.read(cx);
1477 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1478 file
1479 } else {
1480 continue;
1481 };
1482 let language = if let Some(language) = buffer.language() {
1483 language
1484 } else {
1485 continue;
1486 };
1487 if file.worktree.read(cx).id() != key.0
1488 || language.lsp_adapter().map(|a| a.name())
1489 != Some(key.1.clone())
1490 {
1491 continue;
1492 }
1493
1494 let file = file.as_local()?;
1495 let versions = this
1496 .buffer_snapshots
1497 .entry(buffer.remote_id())
1498 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1499 let (version, initial_snapshot) = versions.last().unwrap();
1500 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1501 language_server
1502 .notify::<lsp::notification::DidOpenTextDocument>(
1503 lsp::DidOpenTextDocumentParams {
1504 text_document: lsp::TextDocumentItem::new(
1505 uri,
1506 Default::default(),
1507 *version,
1508 initial_snapshot.text(),
1509 ),
1510 },
1511 )
1512 .log_err()?;
1513 buffer_handle.update(cx, |buffer, cx| {
1514 buffer.set_completion_triggers(
1515 language_server
1516 .capabilities()
1517 .completion_provider
1518 .as_ref()
1519 .and_then(|provider| {
1520 provider.trigger_characters.clone()
1521 })
1522 .unwrap_or(Vec::new()),
1523 cx,
1524 )
1525 });
1526 }
1527 }
1528
1529 cx.notify();
1530 Some(())
1531 });
1532
1533 Some(language_server)
1534 })
1535 });
1536 }
1537
1538 pub fn restart_language_servers_for_buffers(
1539 &mut self,
1540 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Option<()> {
1543 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1544 .into_iter()
1545 .filter_map(|buffer| {
1546 let file = File::from_dyn(buffer.read(cx).file())?;
1547 let worktree = file.worktree.read(cx).as_local()?;
1548 let worktree_id = worktree.id();
1549 let worktree_abs_path = worktree.abs_path().clone();
1550 let full_path = file.full_path(cx);
1551 Some((worktree_id, worktree_abs_path, full_path))
1552 })
1553 .collect();
1554 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1555 let language = self.languages.select_language(&full_path)?;
1556 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1557 }
1558
1559 None
1560 }
1561
1562 fn restart_language_server(
1563 &mut self,
1564 worktree_id: WorktreeId,
1565 worktree_path: Arc<Path>,
1566 language: Arc<Language>,
1567 cx: &mut ModelContext<Self>,
1568 ) {
1569 let adapter = if let Some(adapter) = language.lsp_adapter() {
1570 adapter
1571 } else {
1572 return;
1573 };
1574 let key = (worktree_id, adapter.name());
1575 let server_to_shutdown = self.language_servers.remove(&key);
1576 self.started_language_servers.remove(&key);
1577 server_to_shutdown
1578 .as_ref()
1579 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1580 cx.spawn_weak(|this, mut cx| async move {
1581 if let Some(this) = this.upgrade(&cx) {
1582 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1583 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1584 shutdown_task.await;
1585 }
1586 }
1587
1588 this.update(&mut cx, |this, cx| {
1589 this.start_language_server(worktree_id, worktree_path, language, cx);
1590 });
1591 }
1592 })
1593 .detach();
1594 }
1595
1596 fn on_lsp_diagnostics_published(
1597 &mut self,
1598 server_id: usize,
1599 mut params: lsp::PublishDiagnosticsParams,
1600 adapter: &Arc<dyn LspAdapter>,
1601 disk_based_diagnostics_progress_token: Option<&str>,
1602 cx: &mut ModelContext<Self>,
1603 ) {
1604 adapter.process_diagnostics(&mut params);
1605 if disk_based_diagnostics_progress_token.is_none() {
1606 self.disk_based_diagnostics_started(cx);
1607 self.broadcast_language_server_update(
1608 server_id,
1609 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1610 proto::LspDiskBasedDiagnosticsUpdating {},
1611 ),
1612 );
1613 }
1614 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1615 .log_err();
1616 if disk_based_diagnostics_progress_token.is_none() {
1617 self.disk_based_diagnostics_finished(cx);
1618 self.broadcast_language_server_update(
1619 server_id,
1620 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1621 proto::LspDiskBasedDiagnosticsUpdated {},
1622 ),
1623 );
1624 }
1625 }
1626
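    /// Handles `$/progress` notifications, updating the server's status. Work
    /// tagged with the adapter's disk-based-diagnostics token is tracked
    /// separately so the project can report when those diagnostics are being
    /// refreshed.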
1627 fn on_lsp_progress(
1628 &mut self,
1629 progress: lsp::ProgressParams,
1630 server_id: usize,
1631 disk_based_diagnostics_progress_token: Option<&str>,
1632 cx: &mut ModelContext<Self>,
1633 ) {
1634 let token = match progress.token {
1635 lsp::NumberOrString::String(token) => token,
1636 lsp::NumberOrString::Number(token) => {
1637 log::info!("skipping numeric progress token {}", token);
1638 return;
1639 }
1640 };
1641 let progress = match progress.value {
1642 lsp::ProgressParamsValue::WorkDone(value) => value,
1643 };
1644 let language_server_status =
1645 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1646 status
1647 } else {
1648 return;
1649 };
1650 match progress {
1651 lsp::WorkDoneProgress::Begin(_) => {
1652 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1653 language_server_status.pending_diagnostic_updates += 1;
1654 if language_server_status.pending_diagnostic_updates == 1 {
1655 self.disk_based_diagnostics_started(cx);
1656 self.broadcast_language_server_update(
1657 server_id,
1658 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1659 proto::LspDiskBasedDiagnosticsUpdating {},
1660 ),
1661 );
1662 }
1663 } else {
1664 self.on_lsp_work_start(server_id, token.clone(), cx);
1665 self.broadcast_language_server_update(
1666 server_id,
1667 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1668 token,
1669 }),
1670 );
1671 }
1672 }
1673 lsp::WorkDoneProgress::Report(report) => {
1674 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1675 self.on_lsp_work_progress(
1676 server_id,
1677 token.clone(),
1678 LanguageServerProgress {
1679 message: report.message.clone(),
1680 percentage: report.percentage.map(|p| p as usize),
1681 last_update_at: Instant::now(),
1682 },
1683 cx,
1684 );
1685 self.broadcast_language_server_update(
1686 server_id,
1687 proto::update_language_server::Variant::WorkProgress(
1688 proto::LspWorkProgress {
1689 token,
1690 message: report.message,
1691 percentage: report.percentage.map(|p| p as u32),
1692 },
1693 ),
1694 );
1695 }
1696 }
1697 lsp::WorkDoneProgress::End(_) => {
1698 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1699 language_server_status.pending_diagnostic_updates -= 1;
1700 if language_server_status.pending_diagnostic_updates == 0 {
1701 self.disk_based_diagnostics_finished(cx);
1702 self.broadcast_language_server_update(
1703 server_id,
1704 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1705 proto::LspDiskBasedDiagnosticsUpdated {},
1706 ),
1707 );
1708 }
1709 } else {
1710 self.on_lsp_work_end(server_id, token.clone(), cx);
1711 self.broadcast_language_server_update(
1712 server_id,
1713 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1714 token,
1715 }),
1716 );
1717 }
1718 }
1719 }
1720 }
1721
1722 fn on_lsp_work_start(
1723 &mut self,
1724 language_server_id: usize,
1725 token: String,
1726 cx: &mut ModelContext<Self>,
1727 ) {
1728 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1729 status.pending_work.insert(
1730 token,
1731 LanguageServerProgress {
1732 message: None,
1733 percentage: None,
1734 last_update_at: Instant::now(),
1735 },
1736 );
1737 cx.notify();
1738 }
1739 }
1740
1741 fn on_lsp_work_progress(
1742 &mut self,
1743 language_server_id: usize,
1744 token: String,
1745 progress: LanguageServerProgress,
1746 cx: &mut ModelContext<Self>,
1747 ) {
1748 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1749 status.pending_work.insert(token, progress);
1750 cx.notify();
1751 }
1752 }
1753
1754 fn on_lsp_work_end(
1755 &mut self,
1756 language_server_id: usize,
1757 token: String,
1758 cx: &mut ModelContext<Self>,
1759 ) {
1760 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1761 status.pending_work.remove(&token);
1762 cx.notify();
1763 }
1764 }
1765
1766 async fn on_lsp_workspace_edit(
1767 this: WeakModelHandle<Self>,
1768 params: lsp::ApplyWorkspaceEditParams,
1769 server_id: usize,
1770 adapter: Arc<dyn LspAdapter>,
1771 language_server: Arc<LanguageServer>,
1772 mut cx: AsyncAppContext,
1773 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1774 let this = this
1775 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1777 let transaction = Self::deserialize_workspace_edit(
1778 this.clone(),
1779 params.edit,
1780 true,
1781 adapter.clone(),
1782 language_server.clone(),
1783 &mut cx,
1784 )
1785 .await
1786 .log_err();
1787 this.update(&mut cx, |this, _| {
1788 if let Some(transaction) = transaction {
1789 this.last_workspace_edits_by_language_server
1790 .insert(server_id, transaction);
1791 }
1792 });
1793 Ok(lsp::ApplyWorkspaceEditResponse {
1794 applied: true,
1795 failed_change: None,
1796 failure_reason: None,
1797 })
1798 }
1799
1800 fn broadcast_language_server_update(
1801 &self,
1802 language_server_id: usize,
1803 event: proto::update_language_server::Variant,
1804 ) {
1805 if let Some(project_id) = self.remote_id() {
1806 self.client
1807 .send(proto::UpdateLanguageServer {
1808 project_id,
1809 language_server_id: language_server_id as u64,
1810 variant: Some(event),
1811 })
1812 .log_err();
1813 }
1814 }
1815
1816 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1817 for (_, server) in self.language_servers.values() {
1818 server
1819 .notify::<lsp::notification::DidChangeConfiguration>(
1820 lsp::DidChangeConfigurationParams {
1821 settings: settings.clone(),
1822 },
1823 )
1824 .ok();
1825 }
1826 *self.language_server_settings.lock() = settings;
1827 }
1828
1829 pub fn language_server_statuses(
1830 &self,
1831 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1832 self.language_server_statuses.values()
1833 }
1834
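    /// Converts an LSP `publishDiagnostics` notification into diagnostic entries,
    /// grouping each primary diagnostic with its related information and marking
    /// diagnostics that come from the adapter's disk-based sources.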
1835 pub fn update_diagnostics(
1836 &mut self,
1837 params: lsp::PublishDiagnosticsParams,
1838 disk_based_sources: &[&str],
1839 cx: &mut ModelContext<Self>,
1840 ) -> Result<()> {
1841 let abs_path = params
1842 .uri
1843 .to_file_path()
1844 .map_err(|_| anyhow!("URI is not a file"))?;
1845 let mut next_group_id = 0;
1846 let mut diagnostics = Vec::default();
1847 let mut primary_diagnostic_group_ids = HashMap::default();
1848 let mut sources_by_group_id = HashMap::default();
1849 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1851 let source = diagnostic.source.as_ref();
1852 let code = diagnostic.code.as_ref().map(|code| match code {
1853 lsp::NumberOrString::Number(code) => code.to_string(),
1854 lsp::NumberOrString::String(code) => code.clone(),
1855 });
1856 let range = range_from_lsp(diagnostic.range);
1857 let is_supporting = diagnostic
1858 .related_information
1859 .as_ref()
1860 .map_or(false, |infos| {
1861 infos.iter().any(|info| {
1862 primary_diagnostic_group_ids.contains_key(&(
1863 source,
1864 code.clone(),
1865 range_from_lsp(info.location.range),
1866 ))
1867 })
1868 });
1869
1870 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1871 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1872 });
1873
1874 if is_supporting {
1875 supporting_diagnostics.insert(
1876 (source, code.clone(), range),
1877 (diagnostic.severity, is_unnecessary),
1878 );
1879 } else {
1880 let group_id = post_inc(&mut next_group_id);
1881 let is_disk_based = source.map_or(false, |source| {
1882 disk_based_sources.contains(&source.as_str())
1883 });
1884
1885 sources_by_group_id.insert(group_id, source);
1886 primary_diagnostic_group_ids
1887 .insert((source, code.clone(), range.clone()), group_id);
1888
1889 diagnostics.push(DiagnosticEntry {
1890 range,
1891 diagnostic: Diagnostic {
1892 code: code.clone(),
1893 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1894 message: diagnostic.message.clone(),
1895 group_id,
1896 is_primary: true,
1897 is_valid: true,
1898 is_disk_based,
1899 is_unnecessary,
1900 },
1901 });
1902 if let Some(infos) = &diagnostic.related_information {
1903 for info in infos {
1904 if info.location.uri == params.uri && !info.message.is_empty() {
1905 let range = range_from_lsp(info.location.range);
1906 diagnostics.push(DiagnosticEntry {
1907 range,
1908 diagnostic: Diagnostic {
1909 code: code.clone(),
1910 severity: DiagnosticSeverity::INFORMATION,
1911 message: info.message.clone(),
1912 group_id,
1913 is_primary: false,
1914 is_valid: true,
1915 is_disk_based,
1916 is_unnecessary: false,
1917 },
1918 });
1919 }
1920 }
1921 }
1922 }
1923 }
1924
1925 for entry in &mut diagnostics {
1926 let diagnostic = &mut entry.diagnostic;
1927 if !diagnostic.is_primary {
1928 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1929 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1930 source,
1931 diagnostic.code.clone(),
1932 entry.range.clone(),
1933 )) {
1934 if let Some(severity) = severity {
1935 diagnostic.severity = severity;
1936 }
1937 diagnostic.is_unnecessary = is_unnecessary;
1938 }
1939 }
1940 }
1941
1942 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1943 Ok(())
1944 }
1945
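// Route newly received diagnostics to the local worktree that owns `abs_path`:
// update any open buffer for that path, record the diagnostics on the worktree
// itself, and emit `DiagnosticsUpdated` so observers can react.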
1946 pub fn update_diagnostic_entries(
1947 &mut self,
1948 abs_path: PathBuf,
1949 version: Option<i32>,
1950 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1951 cx: &mut ModelContext<Project>,
1952 ) -> Result<(), anyhow::Error> {
1953 let (worktree, relative_path) = self
1954 .find_local_worktree(&abs_path, cx)
1955 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1956 if !worktree.read(cx).is_visible() {
1957 return Ok(());
1958 }
1959
1960 let project_path = ProjectPath {
1961 worktree_id: worktree.read(cx).id(),
1962 path: relative_path.into(),
1963 };
1964
1965 for buffer in self.opened_buffers.values() {
1966 if let Some(buffer) = buffer.upgrade(cx) {
1967 if buffer
1968 .read(cx)
1969 .file()
1970 .map_or(false, |file| *file.path() == project_path.path)
1971 {
1972 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1973 break;
1974 }
1975 }
1976 }
1977 worktree.update(cx, |worktree, cx| {
1978 worktree
1979 .as_local_mut()
1980 .ok_or_else(|| anyhow!("not a local worktree"))?
1981 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1982 })?;
1983 cx.emit(Event::DiagnosticsUpdated(project_path));
1984 Ok(())
1985 }
1986
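// Prepare diagnostics for a single buffer: sort them, translate disk-based
// ranges through any edits made since the last save, clip everything to the
// buffer snapshot (widening empty ranges to at least one character), and
// install the resulting `DiagnosticSet` on the buffer.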
1987 fn update_buffer_diagnostics(
1988 &mut self,
1989 buffer: &ModelHandle<Buffer>,
1990 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1991 version: Option<i32>,
1992 cx: &mut ModelContext<Self>,
1993 ) -> Result<()> {
1994 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1995 Ordering::Equal
1996 .then_with(|| b.is_primary.cmp(&a.is_primary))
1997 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1998 .then_with(|| a.severity.cmp(&b.severity))
1999 .then_with(|| a.message.cmp(&b.message))
2000 }
2001
2002 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2003
2004 diagnostics.sort_unstable_by(|a, b| {
2005 Ordering::Equal
2006 .then_with(|| a.range.start.cmp(&b.range.start))
2007 .then_with(|| b.range.end.cmp(&a.range.end))
2008 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2009 });
2010
2011 let mut sanitized_diagnostics = Vec::new();
2012 let edits_since_save = Patch::new(
2013 snapshot
2014 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2015 .collect(),
2016 );
2017 for entry in diagnostics {
2018 let start;
2019 let end;
2020 if entry.diagnostic.is_disk_based {
2021 // Some diagnostics are based on files on disk instead of buffers'
2022 // current contents. Adjust these diagnostics' ranges to reflect
2023 // any unsaved edits.
2024 start = edits_since_save.old_to_new(entry.range.start);
2025 end = edits_since_save.old_to_new(entry.range.end);
2026 } else {
2027 start = entry.range.start;
2028 end = entry.range.end;
2029 }
2030
2031 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2032 ..snapshot.clip_point_utf16(end, Bias::Right);
2033
2034 // Expand empty ranges by one character
2035 if range.start == range.end {
2036 range.end.column += 1;
2037 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2038 if range.start == range.end && range.end.column > 0 {
2039 range.start.column -= 1;
2040 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2041 }
2042 }
2043
2044 sanitized_diagnostics.push(DiagnosticEntry {
2045 range,
2046 diagnostic: entry.diagnostic,
2047 });
2048 }
2049 drop(edits_since_save);
2050
2051 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2052 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2053 Ok(())
2054 }
2055
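// Reload the given buffers from disk, considering only those with unsaved
// changes. Local buffers are reloaded directly; buffers belonging to a remote
// project go through a `ReloadBuffers` RPC. All resulting transactions are
// gathered into a single `ProjectTransaction`.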
2056 pub fn reload_buffers(
2057 &self,
2058 buffers: HashSet<ModelHandle<Buffer>>,
2059 push_to_history: bool,
2060 cx: &mut ModelContext<Self>,
2061 ) -> Task<Result<ProjectTransaction>> {
2062 let mut local_buffers = Vec::new();
2063 let mut remote_buffers = None;
2064 for buffer_handle in buffers {
2065 let buffer = buffer_handle.read(cx);
2066 if buffer.is_dirty() {
2067 if let Some(file) = File::from_dyn(buffer.file()) {
2068 if file.is_local() {
2069 local_buffers.push(buffer_handle);
2070 } else {
2071 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2072 }
2073 }
2074 }
2075 }
2076
2077 let remote_buffers = self.remote_id().zip(remote_buffers);
2078 let client = self.client.clone();
2079
2080 cx.spawn(|this, mut cx| async move {
2081 let mut project_transaction = ProjectTransaction::default();
2082
2083 if let Some((project_id, remote_buffers)) = remote_buffers {
2084 let response = client
2085 .request(proto::ReloadBuffers {
2086 project_id,
2087 buffer_ids: remote_buffers
2088 .iter()
2089 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2090 .collect(),
2091 })
2092 .await?
2093 .transaction
2094 .ok_or_else(|| anyhow!("missing transaction"))?;
2095 project_transaction = this
2096 .update(&mut cx, |this, cx| {
2097 this.deserialize_project_transaction(response, push_to_history, cx)
2098 })
2099 .await?;
2100 }
2101
2102 for buffer in local_buffers {
2103 let transaction = buffer
2104 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2105 .await?;
2106 buffer.update(&mut cx, |buffer, cx| {
2107 if let Some(transaction) = transaction {
2108 if !push_to_history {
2109 buffer.forget_transaction(transaction.id);
2110 }
2111 project_transaction.0.insert(cx.handle(), transaction);
2112 }
2113 });
2114 }
2115
2116 Ok(project_transaction)
2117 })
2118 }
2119
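// Format the given buffers. Local buffers are formatted by their language
// server, preferring `textDocument/formatting` and falling back to
// `textDocument/rangeFormatting` over the entire buffer; remote buffers are
// formatted on the host via a `FormatBuffers` RPC. Edits are applied inside a
// single transaction per buffer and collected into a `ProjectTransaction`.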
2120 pub fn format(
2121 &self,
2122 buffers: HashSet<ModelHandle<Buffer>>,
2123 push_to_history: bool,
2124 cx: &mut ModelContext<Project>,
2125 ) -> Task<Result<ProjectTransaction>> {
2126 let mut local_buffers = Vec::new();
2127 let mut remote_buffers = None;
2128 for buffer_handle in buffers {
2129 let buffer = buffer_handle.read(cx);
2130 if let Some(file) = File::from_dyn(buffer.file()) {
2131 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2132 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2133 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2134 }
2135 } else {
2136 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2137 }
2138 } else {
2139 return Task::ready(Ok(Default::default()));
2140 }
2141 }
2142
2143 let remote_buffers = self.remote_id().zip(remote_buffers);
2144 let client = self.client.clone();
2145
2146 cx.spawn(|this, mut cx| async move {
2147 let mut project_transaction = ProjectTransaction::default();
2148
2149 if let Some((project_id, remote_buffers)) = remote_buffers {
2150 let response = client
2151 .request(proto::FormatBuffers {
2152 project_id,
2153 buffer_ids: remote_buffers
2154 .iter()
2155 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2156 .collect(),
2157 })
2158 .await?
2159 .transaction
2160 .ok_or_else(|| anyhow!("missing transaction"))?;
2161 project_transaction = this
2162 .update(&mut cx, |this, cx| {
2163 this.deserialize_project_transaction(response, push_to_history, cx)
2164 })
2165 .await?;
2166 }
2167
2168 for (buffer, buffer_abs_path, language_server) in local_buffers {
2169 let text_document = lsp::TextDocumentIdentifier::new(
2170 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2171 );
2172 let capabilities = &language_server.capabilities();
2173 let tab_size = cx.update(|cx| {
2174 let language_name = buffer.read(cx).language().map(|language| language.name());
2175 cx.global::<Settings>().tab_size(language_name.as_deref())
2176 });
2177 let lsp_edits = if capabilities
2178 .document_formatting_provider
2179 .as_ref()
2180 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2181 {
2182 language_server
2183 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2184 text_document,
2185 options: lsp::FormattingOptions {
2186 tab_size,
2187 insert_spaces: true,
2188 insert_final_newline: Some(true),
2189 ..Default::default()
2190 },
2191 work_done_progress_params: Default::default(),
2192 })
2193 .await?
2194 } else if capabilities
2195 .document_range_formatting_provider
2196 .as_ref()
2197 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2198 {
2199 let buffer_start = lsp::Position::new(0, 0);
2200 let buffer_end =
2201 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2202 language_server
2203 .request::<lsp::request::RangeFormatting>(
2204 lsp::DocumentRangeFormattingParams {
2205 text_document,
2206 range: lsp::Range::new(buffer_start, buffer_end),
2207 options: lsp::FormattingOptions {
2208 tab_size,
2209 insert_spaces: true,
2210 insert_final_newline: Some(true),
2211 ..Default::default()
2212 },
2213 work_done_progress_params: Default::default(),
2214 },
2215 )
2216 .await?
2217 } else {
2218 continue;
2219 };
2220
2221 if let Some(lsp_edits) = lsp_edits {
2222 let edits = this
2223 .update(&mut cx, |this, cx| {
2224 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2225 })
2226 .await?;
2227 buffer.update(&mut cx, |buffer, cx| {
2228 buffer.finalize_last_transaction();
2229 buffer.start_transaction();
2230 for (range, text) in edits {
2231 buffer.edit([range], text, cx);
2232 }
2233 if buffer.end_transaction(cx).is_some() {
2234 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2235 if !push_to_history {
2236 buffer.forget_transaction(transaction.id);
2237 }
2238 project_transaction.0.insert(cx.handle(), transaction);
2239 }
2240 });
2241 }
2242 }
2243
2244 Ok(project_transaction)
2245 })
2246 }
2247
2248 pub fn definition<T: ToPointUtf16>(
2249 &self,
2250 buffer: &ModelHandle<Buffer>,
2251 position: T,
2252 cx: &mut ModelContext<Self>,
2253 ) -> Task<Result<Vec<Location>>> {
2254 let position = position.to_point_utf16(buffer.read(cx));
2255 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2256 }
2257
2258 pub fn references<T: ToPointUtf16>(
2259 &self,
2260 buffer: &ModelHandle<Buffer>,
2261 position: T,
2262 cx: &mut ModelContext<Self>,
2263 ) -> Task<Result<Vec<Location>>> {
2264 let position = position.to_point_utf16(buffer.read(cx));
2265 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2266 }
2267
2268 pub fn document_highlights<T: ToPointUtf16>(
2269 &self,
2270 buffer: &ModelHandle<Buffer>,
2271 position: T,
2272 cx: &mut ModelContext<Self>,
2273 ) -> Task<Result<Vec<DocumentHighlight>>> {
2274 let position = position.to_point_utf16(buffer.read(cx));
2275
2276 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2277 }
2278
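// Workspace-wide symbol search. Locally this fans out a `workspace/symbol`
// request to every running language server and maps each result back to a
// worktree id, relative path, and display label; on a remote project it issues
// a single `GetProjectSymbols` request to the host instead.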
2279 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2280 if self.is_local() {
2281 let mut requests = Vec::new();
2282 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2283 let worktree_id = *worktree_id;
2284 if let Some(worktree) = self
2285 .worktree_for_id(worktree_id, cx)
2286 .and_then(|worktree| worktree.read(cx).as_local())
2287 {
2288 let lsp_adapter = lsp_adapter.clone();
2289 let worktree_abs_path = worktree.abs_path().clone();
2290 requests.push(
2291 language_server
2292 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2293 query: query.to_string(),
2294 ..Default::default()
2295 })
2296 .log_err()
2297 .map(move |response| {
2298 (
2299 lsp_adapter,
2300 worktree_id,
2301 worktree_abs_path,
2302 response.unwrap_or_default(),
2303 )
2304 }),
2305 );
2306 }
2307 }
2308
2309 cx.spawn_weak(|this, cx| async move {
2310 let responses = futures::future::join_all(requests).await;
2311 let this = if let Some(this) = this.upgrade(&cx) {
2312 this
2313 } else {
2314 return Ok(Default::default());
2315 };
2316 this.read_with(&cx, |this, cx| {
2317 let mut symbols = Vec::new();
2318 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2319 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2320 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2321 let mut worktree_id = source_worktree_id;
2322 let path;
2323 if let Some((worktree, rel_path)) =
2324 this.find_local_worktree(&abs_path, cx)
2325 {
2326 worktree_id = worktree.read(cx).id();
2327 path = rel_path;
2328 } else {
2329 path = relativize_path(&worktree_abs_path, &abs_path);
2330 }
2331
2332 let label = this
2333 .languages
2334 .select_language(&path)
2335 .and_then(|language| {
2336 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2337 })
2338 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2339 let signature = this.symbol_signature(worktree_id, &path);
2340
2341 Some(Symbol {
2342 source_worktree_id,
2343 worktree_id,
2344 language_server_name: adapter.name(),
2345 name: lsp_symbol.name,
2346 kind: lsp_symbol.kind,
2347 label,
2348 path,
2349 range: range_from_lsp(lsp_symbol.location.range),
2350 signature,
2351 })
2352 }));
2353 }
2354 Ok(symbols)
2355 })
2356 })
2357 } else if let Some(project_id) = self.remote_id() {
2358 let request = self.client.request(proto::GetProjectSymbols {
2359 project_id,
2360 query: query.to_string(),
2361 });
2362 cx.spawn_weak(|this, cx| async move {
2363 let response = request.await?;
2364 let mut symbols = Vec::new();
2365 if let Some(this) = this.upgrade(&cx) {
2366 this.read_with(&cx, |this, _| {
2367 symbols.extend(
2368 response
2369 .symbols
2370 .into_iter()
2371 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2372 );
2373 })
2374 }
2375 Ok(symbols)
2376 })
2377 } else {
2378 Task::ready(Ok(Default::default()))
2379 }
2380 }
2381
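// Open the buffer containing a symbol returned by `symbols`. Locally this
// resolves the symbol's path against its worktree and opens the file through
// the language server that produced it; remotely it asks the host via
// `OpenBufferForSymbol`.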
2382 pub fn open_buffer_for_symbol(
2383 &mut self,
2384 symbol: &Symbol,
2385 cx: &mut ModelContext<Self>,
2386 ) -> Task<Result<ModelHandle<Buffer>>> {
2387 if self.is_local() {
2388 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2389 symbol.source_worktree_id,
2390 symbol.language_server_name.clone(),
2391 )) {
2392 server.clone()
2393 } else {
2394 return Task::ready(Err(anyhow!(
2395 "language server for worktree and language not found"
2396 )));
2397 };
2398
2399 let worktree_abs_path = if let Some(worktree_abs_path) = self
2400 .worktree_for_id(symbol.worktree_id, cx)
2401 .and_then(|worktree| worktree.read(cx).as_local())
2402 .map(|local_worktree| local_worktree.abs_path())
2403 {
2404 worktree_abs_path
2405 } else {
2406 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2407 };
2408 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2409 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2410 uri
2411 } else {
2412 return Task::ready(Err(anyhow!("invalid symbol path")));
2413 };
2414
2415 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2416 } else if let Some(project_id) = self.remote_id() {
2417 let request = self.client.request(proto::OpenBufferForSymbol {
2418 project_id,
2419 symbol: Some(serialize_symbol(symbol)),
2420 });
2421 cx.spawn(|this, mut cx| async move {
2422 let response = request.await?;
2423 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2424 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2425 .await
2426 })
2427 } else {
2428 Task::ready(Err(anyhow!("project does not have a remote id")))
2429 }
2430 }
2431
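// Request completions at a position. For local buffers this sends
// `textDocument/completion` to the buffer's language server and converts each
// item into a `Completion`, discarding items whose edit ranges don't fit the
// current buffer contents; for remote buffers it proxies the request over RPC
// and waits for the buffer to reach the host's reported version before
// deserializing the results.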
2432 pub fn completions<T: ToPointUtf16>(
2433 &self,
2434 source_buffer_handle: &ModelHandle<Buffer>,
2435 position: T,
2436 cx: &mut ModelContext<Self>,
2437 ) -> Task<Result<Vec<Completion>>> {
2438 let source_buffer_handle = source_buffer_handle.clone();
2439 let source_buffer = source_buffer_handle.read(cx);
2440 let buffer_id = source_buffer.remote_id();
2441 let language = source_buffer.language().cloned();
2442 let worktree;
2443 let buffer_abs_path;
2444 if let Some(file) = File::from_dyn(source_buffer.file()) {
2445 worktree = file.worktree.clone();
2446 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2447 } else {
2448 return Task::ready(Ok(Default::default()));
2449 };
2450
2451 let position = position.to_point_utf16(source_buffer);
2452 let anchor = source_buffer.anchor_after(position);
2453
2454 if worktree.read(cx).as_local().is_some() {
2455 let buffer_abs_path = buffer_abs_path.unwrap();
2456 let (_, lang_server) =
2457 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2458 server.clone()
2459 } else {
2460 return Task::ready(Ok(Default::default()));
2461 };
2462
2463 cx.spawn(|_, cx| async move {
2464 let completions = lang_server
2465 .request::<lsp::request::Completion>(lsp::CompletionParams {
2466 text_document_position: lsp::TextDocumentPositionParams::new(
2467 lsp::TextDocumentIdentifier::new(
2468 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2469 ),
2470 point_to_lsp(position),
2471 ),
2472 context: Default::default(),
2473 work_done_progress_params: Default::default(),
2474 partial_result_params: Default::default(),
2475 })
2476 .await
2477 .context("lsp completion request failed")?;
2478
2479 let completions = if let Some(completions) = completions {
2480 match completions {
2481 lsp::CompletionResponse::Array(completions) => completions,
2482 lsp::CompletionResponse::List(list) => list.items,
2483 }
2484 } else {
2485 Default::default()
2486 };
2487
2488 source_buffer_handle.read_with(&cx, |this, _| {
2489 Ok(completions
2490 .into_iter()
2491 .filter_map(|lsp_completion| {
2492 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2493 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2494 (range_from_lsp(edit.range), edit.new_text.clone())
2495 }
2496 None => {
2497 let clipped_position =
2498 this.clip_point_utf16(position, Bias::Left);
2499 if position != clipped_position {
2500 log::info!("completion out of expected range");
2501 return None;
2502 }
2503 (
2504 this.common_prefix_at(
2505 clipped_position,
2506 &lsp_completion.label,
2507 ),
2508 lsp_completion.label.clone(),
2509 )
2510 }
2511 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2512 log::info!("unsupported insert/replace completion");
2513 return None;
2514 }
2515 };
2516
2517 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2518 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2519 if clipped_start == old_range.start && clipped_end == old_range.end {
2520 Some(Completion {
2521 old_range: this.anchor_before(old_range.start)
2522 ..this.anchor_after(old_range.end),
2523 new_text,
2524 label: language
2525 .as_ref()
2526 .and_then(|l| l.label_for_completion(&lsp_completion))
2527 .unwrap_or_else(|| {
2528 CodeLabel::plain(
2529 lsp_completion.label.clone(),
2530 lsp_completion.filter_text.as_deref(),
2531 )
2532 }),
2533 lsp_completion,
2534 })
2535 } else {
2536 log::info!("completion out of expected range");
2537 None
2538 }
2539 })
2540 .collect())
2541 })
2542 })
2543 } else if let Some(project_id) = self.remote_id() {
2544 let rpc = self.client.clone();
2545 let message = proto::GetCompletions {
2546 project_id,
2547 buffer_id,
2548 position: Some(language::proto::serialize_anchor(&anchor)),
2549 version: serialize_version(&source_buffer.version()),
2550 };
2551 cx.spawn_weak(|_, mut cx| async move {
2552 let response = rpc.request(message).await?;
2553
2554 source_buffer_handle
2555 .update(&mut cx, |buffer, _| {
2556 buffer.wait_for_version(deserialize_version(response.version))
2557 })
2558 .await;
2559
2560 response
2561 .completions
2562 .into_iter()
2563 .map(|completion| {
2564 language::proto::deserialize_completion(completion, language.as_ref())
2565 })
2566 .collect()
2567 })
2568 } else {
2569 Task::ready(Ok(Default::default()))
2570 }
2571 }
2572
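// Apply the additional text edits (auto-imports and the like) attached to a
// completion. Locally the item is first resolved via `completionItem/resolve`
// and any extra edits are applied in their own transaction; on a remote
// project the host applies them, and this replica waits for the edits to
// arrive before optionally recording the transaction in the buffer's history.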
2573 pub fn apply_additional_edits_for_completion(
2574 &self,
2575 buffer_handle: ModelHandle<Buffer>,
2576 completion: Completion,
2577 push_to_history: bool,
2578 cx: &mut ModelContext<Self>,
2579 ) -> Task<Result<Option<Transaction>>> {
2580 let buffer = buffer_handle.read(cx);
2581 let buffer_id = buffer.remote_id();
2582
2583 if self.is_local() {
2584 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2585 {
2586 server.clone()
2587 } else {
2588 return Task::ready(Ok(Default::default()));
2589 };
2590
2591 cx.spawn(|this, mut cx| async move {
2592 let resolved_completion = lang_server
2593 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2594 .await?;
2595 if let Some(edits) = resolved_completion.additional_text_edits {
2596 let edits = this
2597 .update(&mut cx, |this, cx| {
2598 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2599 })
2600 .await?;
2601 buffer_handle.update(&mut cx, |buffer, cx| {
2602 buffer.finalize_last_transaction();
2603 buffer.start_transaction();
2604 for (range, text) in edits {
2605 buffer.edit([range], text, cx);
2606 }
2607 let transaction = if buffer.end_transaction(cx).is_some() {
2608 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2609 if !push_to_history {
2610 buffer.forget_transaction(transaction.id);
2611 }
2612 Some(transaction)
2613 } else {
2614 None
2615 };
2616 Ok(transaction)
2617 })
2618 } else {
2619 Ok(None)
2620 }
2621 })
2622 } else if let Some(project_id) = self.remote_id() {
2623 let client = self.client.clone();
2624 cx.spawn(|_, mut cx| async move {
2625 let response = client
2626 .request(proto::ApplyCompletionAdditionalEdits {
2627 project_id,
2628 buffer_id,
2629 completion: Some(language::proto::serialize_completion(&completion)),
2630 })
2631 .await?;
2632
2633 if let Some(transaction) = response.transaction {
2634 let transaction = language::proto::deserialize_transaction(transaction)?;
2635 buffer_handle
2636 .update(&mut cx, |buffer, _| {
2637 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2638 })
2639 .await;
2640 if push_to_history {
2641 buffer_handle.update(&mut cx, |buffer, _| {
2642 buffer.push_transaction(transaction.clone(), Instant::now());
2643 });
2644 }
2645 Ok(Some(transaction))
2646 } else {
2647 Ok(None)
2648 }
2649 })
2650 } else {
2651 Task::ready(Err(anyhow!("project does not have a remote id")))
2652 }
2653 }
2654
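// Fetch code actions for a range, forwarding the diagnostics that overlap it.
// Only a fixed set of action kinds (quickfix, refactor, refactor.extract,
// source) is requested, and servers without a code-action provider yield an
// empty list.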
2655 pub fn code_actions<T: Clone + ToOffset>(
2656 &self,
2657 buffer_handle: &ModelHandle<Buffer>,
2658 range: Range<T>,
2659 cx: &mut ModelContext<Self>,
2660 ) -> Task<Result<Vec<CodeAction>>> {
2661 let buffer_handle = buffer_handle.clone();
2662 let buffer = buffer_handle.read(cx);
2663 let snapshot = buffer.snapshot();
2664 let relevant_diagnostics = snapshot
2665 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2666 .map(|entry| entry.to_lsp_diagnostic_stub())
2667 .collect();
2668 let buffer_id = buffer.remote_id();
2669 let worktree;
2670 let buffer_abs_path;
2671 if let Some(file) = File::from_dyn(buffer.file()) {
2672 worktree = file.worktree.clone();
2673 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2674 } else {
2675 return Task::ready(Ok(Default::default()));
2676 };
2677 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2678
2679 if worktree.read(cx).as_local().is_some() {
2680 let buffer_abs_path = buffer_abs_path.unwrap();
2681 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2682 {
2683 server.clone()
2684 } else {
2685 return Task::ready(Ok(Default::default()));
2686 };
2687
2688 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2689 cx.foreground().spawn(async move {
2690 if lang_server.capabilities().code_action_provider.is_none() {
2691 return Ok(Default::default());
2692 }
2693
2694 Ok(lang_server
2695 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2696 text_document: lsp::TextDocumentIdentifier::new(
2697 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2698 ),
2699 range: lsp_range,
2700 work_done_progress_params: Default::default(),
2701 partial_result_params: Default::default(),
2702 context: lsp::CodeActionContext {
2703 diagnostics: relevant_diagnostics,
2704 only: Some(vec![
2705 lsp::CodeActionKind::QUICKFIX,
2706 lsp::CodeActionKind::REFACTOR,
2707 lsp::CodeActionKind::REFACTOR_EXTRACT,
2708 lsp::CodeActionKind::SOURCE,
2709 ]),
2710 },
2711 })
2712 .await?
2713 .unwrap_or_default()
2714 .into_iter()
2715 .filter_map(|entry| {
2716 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2717 Some(CodeAction {
2718 range: range.clone(),
2719 lsp_action,
2720 })
2721 } else {
2722 None
2723 }
2724 })
2725 .collect())
2726 })
2727 } else if let Some(project_id) = self.remote_id() {
2728 let rpc = self.client.clone();
2729 let version = buffer.version();
2730 cx.spawn_weak(|_, mut cx| async move {
2731 let response = rpc
2732 .request(proto::GetCodeActions {
2733 project_id,
2734 buffer_id,
2735 start: Some(language::proto::serialize_anchor(&range.start)),
2736 end: Some(language::proto::serialize_anchor(&range.end)),
2737 version: serialize_version(&version),
2738 })
2739 .await?;
2740
2741 buffer_handle
2742 .update(&mut cx, |buffer, _| {
2743 buffer.wait_for_version(deserialize_version(response.version))
2744 })
2745 .await;
2746
2747 response
2748 .actions
2749 .into_iter()
2750 .map(language::proto::deserialize_code_action)
2751 .collect()
2752 })
2753 } else {
2754 Task::ready(Ok(Default::default()))
2755 }
2756 }
2757
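// Apply a previously fetched code action. The action is refreshed first,
// either via `codeAction/resolve` or by re-requesting actions and matching on
// title. Its workspace edit is then applied; if the action only carries a
// command, the command is executed and any workspace edits the server issued
// while it ran are returned instead.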
2758 pub fn apply_code_action(
2759 &self,
2760 buffer_handle: ModelHandle<Buffer>,
2761 mut action: CodeAction,
2762 push_to_history: bool,
2763 cx: &mut ModelContext<Self>,
2764 ) -> Task<Result<ProjectTransaction>> {
2765 if self.is_local() {
2766 let buffer = buffer_handle.read(cx);
2767 let (lsp_adapter, lang_server) =
2768 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2769 server.clone()
2770 } else {
2771 return Task::ready(Ok(Default::default()));
2772 };
2773 let range = action.range.to_point_utf16(buffer);
2774
2775 cx.spawn(|this, mut cx| async move {
2776 if let Some(lsp_range) = action
2777 .lsp_action
2778 .data
2779 .as_mut()
2780 .and_then(|d| d.get_mut("codeActionParams"))
2781 .and_then(|d| d.get_mut("range"))
2782 {
2783 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2784 action.lsp_action = lang_server
2785 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2786 .await?;
2787 } else {
2788 let actions = this
2789 .update(&mut cx, |this, cx| {
2790 this.code_actions(&buffer_handle, action.range, cx)
2791 })
2792 .await?;
2793 action.lsp_action = actions
2794 .into_iter()
2795 .find(|a| a.lsp_action.title == action.lsp_action.title)
2796 .ok_or_else(|| anyhow!("code action is outdated"))?
2797 .lsp_action;
2798 }
2799
2800 if let Some(edit) = action.lsp_action.edit {
2801 Self::deserialize_workspace_edit(
2802 this,
2803 edit,
2804 push_to_history,
2805 lsp_adapter,
2806 lang_server,
2807 &mut cx,
2808 )
2809 .await
2810 } else if let Some(command) = action.lsp_action.command {
2811 this.update(&mut cx, |this, _| {
2812 this.last_workspace_edits_by_language_server
2813 .remove(&lang_server.server_id());
2814 });
2815 lang_server
2816 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2817 command: command.command,
2818 arguments: command.arguments.unwrap_or_default(),
2819 ..Default::default()
2820 })
2821 .await?;
2822 Ok(this.update(&mut cx, |this, _| {
2823 this.last_workspace_edits_by_language_server
2824 .remove(&lang_server.server_id())
2825 .unwrap_or_default()
2826 }))
2827 } else {
2828 Ok(ProjectTransaction::default())
2829 }
2830 })
2831 } else if let Some(project_id) = self.remote_id() {
2832 let client = self.client.clone();
2833 let request = proto::ApplyCodeAction {
2834 project_id,
2835 buffer_id: buffer_handle.read(cx).remote_id(),
2836 action: Some(language::proto::serialize_code_action(&action)),
2837 };
2838 cx.spawn(|this, mut cx| async move {
2839 let response = client
2840 .request(request)
2841 .await?
2842 .transaction
2843 .ok_or_else(|| anyhow!("missing transaction"))?;
2844 this.update(&mut cx, |this, cx| {
2845 this.deserialize_project_transaction(response, push_to_history, cx)
2846 })
2847 .await
2848 })
2849 } else {
2850 Task::ready(Err(anyhow!("project does not have a remote id")))
2851 }
2852 }
2853
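// Apply an LSP workspace edit to the project: resource operations create,
// rename, or delete files and directories on disk, while text-document edits
// are applied to freshly opened buffers in per-buffer transactions that are
// collected into the returned `ProjectTransaction`.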
2854 async fn deserialize_workspace_edit(
2855 this: ModelHandle<Self>,
2856 edit: lsp::WorkspaceEdit,
2857 push_to_history: bool,
2858 lsp_adapter: Arc<dyn LspAdapter>,
2859 language_server: Arc<LanguageServer>,
2860 cx: &mut AsyncAppContext,
2861 ) -> Result<ProjectTransaction> {
2862 let fs = this.read_with(cx, |this, _| this.fs.clone());
2863 let mut operations = Vec::new();
2864 if let Some(document_changes) = edit.document_changes {
2865 match document_changes {
2866 lsp::DocumentChanges::Edits(edits) => {
2867 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2868 }
2869 lsp::DocumentChanges::Operations(ops) => operations = ops,
2870 }
2871 } else if let Some(changes) = edit.changes {
2872 operations.extend(changes.into_iter().map(|(uri, edits)| {
2873 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2874 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2875 uri,
2876 version: None,
2877 },
2878 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2879 })
2880 }));
2881 }
2882
2883 let mut project_transaction = ProjectTransaction::default();
2884 for operation in operations {
2885 match operation {
2886 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2887 let abs_path = op
2888 .uri
2889 .to_file_path()
2890 .map_err(|_| anyhow!("can't convert URI to path"))?;
2891
2892 if let Some(parent_path) = abs_path.parent() {
2893 fs.create_dir(parent_path).await?;
2894 }
2895 if abs_path.ends_with("/") {
2896 fs.create_dir(&abs_path).await?;
2897 } else {
2898 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2899 .await?;
2900 }
2901 }
2902 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2903 let source_abs_path = op
2904 .old_uri
2905 .to_file_path()
2906 .map_err(|_| anyhow!("can't convert URI to path"))?;
2907 let target_abs_path = op
2908 .new_uri
2909 .to_file_path()
2910 .map_err(|_| anyhow!("can't convert URI to path"))?;
2911 fs.rename(
2912 &source_abs_path,
2913 &target_abs_path,
2914 op.options.map(Into::into).unwrap_or_default(),
2915 )
2916 .await?;
2917 }
2918 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2919 let abs_path = op
2920 .uri
2921 .to_file_path()
2922 .map_err(|_| anyhow!("can't convert URI to path"))?;
2923 let options = op.options.map(Into::into).unwrap_or_default();
2924 if abs_path.ends_with("/") {
2925 fs.remove_dir(&abs_path, options).await?;
2926 } else {
2927 fs.remove_file(&abs_path, options).await?;
2928 }
2929 }
2930 lsp::DocumentChangeOperation::Edit(op) => {
2931 let buffer_to_edit = this
2932 .update(cx, |this, cx| {
2933 this.open_local_buffer_via_lsp(
2934 op.text_document.uri,
2935 lsp_adapter.clone(),
2936 language_server.clone(),
2937 cx,
2938 )
2939 })
2940 .await?;
2941
2942 let edits = this
2943 .update(cx, |this, cx| {
2944 let edits = op.edits.into_iter().map(|edit| match edit {
2945 lsp::OneOf::Left(edit) => edit,
2946 lsp::OneOf::Right(edit) => edit.text_edit,
2947 });
2948 this.edits_from_lsp(
2949 &buffer_to_edit,
2950 edits,
2951 op.text_document.version,
2952 cx,
2953 )
2954 })
2955 .await?;
2956
2957 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2958 buffer.finalize_last_transaction();
2959 buffer.start_transaction();
2960 for (range, text) in edits {
2961 buffer.edit([range], text, cx);
2962 }
2963 let transaction = if buffer.end_transaction(cx).is_some() {
2964 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2965 if !push_to_history {
2966 buffer.forget_transaction(transaction.id);
2967 }
2968 Some(transaction)
2969 } else {
2970 None
2971 };
2972
2973 transaction
2974 });
2975 if let Some(transaction) = transaction {
2976 project_transaction.0.insert(buffer_to_edit, transaction);
2977 }
2978 }
2979 }
2980 }
2981
2982 Ok(project_transaction)
2983 }
2984
2985 pub fn prepare_rename<T: ToPointUtf16>(
2986 &self,
2987 buffer: ModelHandle<Buffer>,
2988 position: T,
2989 cx: &mut ModelContext<Self>,
2990 ) -> Task<Result<Option<Range<Anchor>>>> {
2991 let position = position.to_point_utf16(buffer.read(cx));
2992 self.request_lsp(buffer, PrepareRename { position }, cx)
2993 }
2994
2995 pub fn perform_rename<T: ToPointUtf16>(
2996 &self,
2997 buffer: ModelHandle<Buffer>,
2998 position: T,
2999 new_name: String,
3000 push_to_history: bool,
3001 cx: &mut ModelContext<Self>,
3002 ) -> Task<Result<ProjectTransaction>> {
3003 let position = position.to_point_utf16(buffer.read(cx));
3004 self.request_lsp(
3005 buffer,
3006 PerformRename {
3007 position,
3008 new_name,
3009 push_to_history,
3010 },
3011 cx,
3012 )
3013 }
3014
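// Project-wide text search. Locally, already-open buffers are searched
// directly while background workers scan the visible worktrees for files
// whose contents might match; matching files are opened and streamed to a
// worker pool that runs the query against each buffer's rope, producing
// anchor ranges per buffer. Remote projects delegate the search to the host.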
3015 pub fn search(
3016 &self,
3017 query: SearchQuery,
3018 cx: &mut ModelContext<Self>,
3019 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3020 if self.is_local() {
3021 let snapshots = self
3022 .visible_worktrees(cx)
3023 .filter_map(|tree| {
3024 let tree = tree.read(cx).as_local()?;
3025 Some(tree.snapshot())
3026 })
3027 .collect::<Vec<_>>();
3028
3029 let background = cx.background().clone();
3030 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3031 if path_count == 0 {
3032 return Task::ready(Ok(Default::default()));
3033 }
3034 let workers = background.num_cpus().min(path_count);
3035 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3036 cx.background()
3037 .spawn({
3038 let fs = self.fs.clone();
3039 let background = cx.background().clone();
3040 let query = query.clone();
3041 async move {
3042 let fs = &fs;
3043 let query = &query;
3044 let matching_paths_tx = &matching_paths_tx;
3045 let paths_per_worker = (path_count + workers - 1) / workers;
3046 let snapshots = &snapshots;
3047 background
3048 .scoped(|scope| {
3049 for worker_ix in 0..workers {
3050 let worker_start_ix = worker_ix * paths_per_worker;
3051 let worker_end_ix = worker_start_ix + paths_per_worker;
3052 scope.spawn(async move {
3053 let mut snapshot_start_ix = 0;
3054 let mut abs_path = PathBuf::new();
3055 for snapshot in snapshots {
3056 let snapshot_end_ix =
3057 snapshot_start_ix + snapshot.visible_file_count();
3058 if worker_end_ix <= snapshot_start_ix {
3059 break;
3060 } else if worker_start_ix > snapshot_end_ix {
3061 snapshot_start_ix = snapshot_end_ix;
3062 continue;
3063 } else {
3064 let start_in_snapshot = worker_start_ix
3065 .saturating_sub(snapshot_start_ix);
3066 let end_in_snapshot =
3067 cmp::min(worker_end_ix, snapshot_end_ix)
3068 - snapshot_start_ix;
3069
3070 for entry in snapshot
3071 .files(false, start_in_snapshot)
3072 .take(end_in_snapshot - start_in_snapshot)
3073 {
3074 if matching_paths_tx.is_closed() {
3075 break;
3076 }
3077
3078 abs_path.clear();
3079 abs_path.push(&snapshot.abs_path());
3080 abs_path.push(&entry.path);
3081 let matches = if let Some(file) =
3082 fs.open_sync(&abs_path).await.log_err()
3083 {
3084 query.detect(file).unwrap_or(false)
3085 } else {
3086 false
3087 };
3088
3089 if matches {
3090 let project_path =
3091 (snapshot.id(), entry.path.clone());
3092 if matching_paths_tx
3093 .send(project_path)
3094 .await
3095 .is_err()
3096 {
3097 break;
3098 }
3099 }
3100 }
3101
3102 snapshot_start_ix = snapshot_end_ix;
3103 }
3104 }
3105 });
3106 }
3107 })
3108 .await;
3109 }
3110 })
3111 .detach();
3112
3113 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3114 let open_buffers = self
3115 .opened_buffers
3116 .values()
3117 .filter_map(|b| b.upgrade(cx))
3118 .collect::<HashSet<_>>();
3119 cx.spawn(|this, cx| async move {
3120 for buffer in &open_buffers {
3121 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3122 buffers_tx.send((buffer.clone(), snapshot)).await?;
3123 }
3124
3125 let open_buffers = Rc::new(RefCell::new(open_buffers));
3126 while let Some(project_path) = matching_paths_rx.next().await {
3127 if buffers_tx.is_closed() {
3128 break;
3129 }
3130
3131 let this = this.clone();
3132 let open_buffers = open_buffers.clone();
3133 let buffers_tx = buffers_tx.clone();
3134 cx.spawn(|mut cx| async move {
3135 if let Some(buffer) = this
3136 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3137 .await
3138 .log_err()
3139 {
3140 if open_buffers.borrow_mut().insert(buffer.clone()) {
3141 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3142 buffers_tx.send((buffer, snapshot)).await?;
3143 }
3144 }
3145
3146 Ok::<_, anyhow::Error>(())
3147 })
3148 .detach();
3149 }
3150
3151 Ok::<_, anyhow::Error>(())
3152 })
3153 .detach_and_log_err(cx);
3154
3155 let background = cx.background().clone();
3156 cx.background().spawn(async move {
3157 let query = &query;
3158 let mut matched_buffers = Vec::new();
3159 for _ in 0..workers {
3160 matched_buffers.push(HashMap::default());
3161 }
3162 background
3163 .scoped(|scope| {
3164 for worker_matched_buffers in matched_buffers.iter_mut() {
3165 let mut buffers_rx = buffers_rx.clone();
3166 scope.spawn(async move {
3167 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3168 let buffer_matches = query
3169 .search(snapshot.as_rope())
3170 .await
3171 .iter()
3172 .map(|range| {
3173 snapshot.anchor_before(range.start)
3174 ..snapshot.anchor_after(range.end)
3175 })
3176 .collect::<Vec<_>>();
3177 if !buffer_matches.is_empty() {
3178 worker_matched_buffers
3179 .insert(buffer.clone(), buffer_matches);
3180 }
3181 }
3182 });
3183 }
3184 })
3185 .await;
3186 Ok(matched_buffers.into_iter().flatten().collect())
3187 })
3188 } else if let Some(project_id) = self.remote_id() {
3189 let request = self.client.request(query.to_proto(project_id));
3190 cx.spawn(|this, mut cx| async move {
3191 let response = request.await?;
3192 let mut result = HashMap::default();
3193 for location in response.locations {
3194 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3195 let target_buffer = this
3196 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3197 .await?;
3198 let start = location
3199 .start
3200 .and_then(deserialize_anchor)
3201 .ok_or_else(|| anyhow!("missing target start"))?;
3202 let end = location
3203 .end
3204 .and_then(deserialize_anchor)
3205 .ok_or_else(|| anyhow!("missing target end"))?;
3206 result
3207 .entry(target_buffer)
3208 .or_default()
3209 .push(start..end);
3210 }
3211 Ok(result)
3212 })
3213 } else {
3214 Task::ready(Ok(Default::default()))
3215 }
3216 }
3217
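// Dispatch a typed LSP request for a buffer. On a local project the request is
// sent to the buffer's language server after checking its capabilities; on a
// remote project it is forwarded to the host over RPC. Otherwise a default
// response is returned.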
3218 fn request_lsp<R: LspCommand>(
3219 &self,
3220 buffer_handle: ModelHandle<Buffer>,
3221 request: R,
3222 cx: &mut ModelContext<Self>,
3223 ) -> Task<Result<R::Response>>
3224 where
3225 <R::LspRequest as lsp::request::Request>::Result: Send,
3226 {
3227 let buffer = buffer_handle.read(cx);
3228 if self.is_local() {
3229 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3230 if let Some((file, (_, language_server))) =
3231 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3232 {
3233 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3234 return cx.spawn(|this, cx| async move {
3235 if !request.check_capabilities(&language_server.capabilities()) {
3236 return Ok(Default::default());
3237 }
3238
3239 let response = language_server
3240 .request::<R::LspRequest>(lsp_params)
3241 .await
3242 .context("lsp request failed")?;
3243 request
3244 .response_from_lsp(response, this, buffer_handle, cx)
3245 .await
3246 });
3247 }
3248 } else if let Some(project_id) = self.remote_id() {
3249 let rpc = self.client.clone();
3250 let message = request.to_proto(project_id, buffer);
3251 return cx.spawn(|this, cx| async move {
3252 let response = rpc.request(message).await?;
3253 request
3254 .response_from_proto(response, this, buffer_handle, cx)
3255 .await
3256 });
3257 }
3258 Task::ready(Ok(Default::default()))
3259 }
3260
3261 pub fn find_or_create_local_worktree(
3262 &mut self,
3263 abs_path: impl AsRef<Path>,
3264 visible: bool,
3265 cx: &mut ModelContext<Self>,
3266 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3267 let abs_path = abs_path.as_ref();
3268 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3269 Task::ready(Ok((tree.clone(), relative_path.into())))
3270 } else {
3271 let worktree = self.create_local_worktree(abs_path, visible, cx);
3272 cx.foreground()
3273 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3274 }
3275 }
3276
3277 pub fn find_local_worktree(
3278 &self,
3279 abs_path: &Path,
3280 cx: &AppContext,
3281 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3282 for tree in self.worktrees(cx) {
3283 if let Some(relative_path) = tree
3284 .read(cx)
3285 .as_local()
3286 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3287 {
3288 return Some((tree.clone(), relative_path.into()));
3289 }
3290 }
3291 None
3292 }
3293
3294 pub fn is_shared(&self) -> bool {
3295 match &self.client_state {
3296 ProjectClientState::Local { is_shared, .. } => *is_shared,
3297 ProjectClientState::Remote { .. } => false,
3298 }
3299 }
3300
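// Create (or join an in-flight load of) a local worktree for `abs_path`. The
// load is memoized in `loading_local_worktrees` so concurrent callers share a
// single task, and once added the worktree is shared or registered with the
// remote project if one exists.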
3301 fn create_local_worktree(
3302 &mut self,
3303 abs_path: impl AsRef<Path>,
3304 visible: bool,
3305 cx: &mut ModelContext<Self>,
3306 ) -> Task<Result<ModelHandle<Worktree>>> {
3307 let fs = self.fs.clone();
3308 let client = self.client.clone();
3309 let next_entry_id = self.next_entry_id.clone();
3310 let path: Arc<Path> = abs_path.as_ref().into();
3311 let task = self
3312 .loading_local_worktrees
3313 .entry(path.clone())
3314 .or_insert_with(|| {
3315 cx.spawn(|project, mut cx| {
3316 async move {
3317 let worktree = Worktree::local(
3318 client.clone(),
3319 path.clone(),
3320 visible,
3321 fs,
3322 next_entry_id,
3323 &mut cx,
3324 )
3325 .await;
3326 project.update(&mut cx, |project, _| {
3327 project.loading_local_worktrees.remove(&path);
3328 });
3329 let worktree = worktree?;
3330
3331 let (remote_project_id, is_shared) =
3332 project.update(&mut cx, |project, cx| {
3333 project.add_worktree(&worktree, cx);
3334 (project.remote_id(), project.is_shared())
3335 });
3336
3337 if let Some(project_id) = remote_project_id {
3338 if is_shared {
3339 worktree
3340 .update(&mut cx, |worktree, cx| {
3341 worktree.as_local_mut().unwrap().share(project_id, cx)
3342 })
3343 .await?;
3344 } else {
3345 worktree
3346 .update(&mut cx, |worktree, cx| {
3347 worktree.as_local_mut().unwrap().register(project_id, cx)
3348 })
3349 .await?;
3350 }
3351 }
3352
3353 Ok(worktree)
3354 }
3355 .map_err(Arc::new)
3356 })
3357 .shared()
3358 })
3359 .clone();
3360 cx.foreground().spawn(async move {
3361 match task.await {
3362 Ok(worktree) => Ok(worktree),
3363 Err(err) => Err(anyhow!("{}", err)),
3364 }
3365 })
3366 }
3367
3368 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3369 self.worktrees.retain(|worktree| {
3370 worktree
3371 .upgrade(cx)
3372 .map_or(false, |w| w.read(cx).id() != id)
3373 });
3374 cx.notify();
3375 }
3376
3377 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3378 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3379 if worktree.read(cx).is_local() {
3380 cx.subscribe(&worktree, |this, worktree, _, cx| {
3381 this.update_local_worktree_buffers(worktree, cx);
3382 })
3383 .detach();
3384 }
3385
3386 let push_strong_handle = {
3387 let worktree = worktree.read(cx);
3388 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3389 };
3390 if push_strong_handle {
3391 self.worktrees
3392 .push(WorktreeHandle::Strong(worktree.clone()));
3393 } else {
3394 cx.observe_release(&worktree, |this, _, cx| {
3395 this.worktrees
3396 .retain(|worktree| worktree.upgrade(cx).is_some());
3397 cx.notify();
3398 })
3399 .detach();
3400 self.worktrees
3401 .push(WorktreeHandle::Weak(worktree.downgrade()));
3402 }
3403 cx.notify();
3404 }
3405
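// Reconcile open buffers with a local worktree that changed on disk: refresh
// each affected buffer's `File`, drop entries for buffers that have been
// closed, notify remote collaborators of the new file metadata, and
// re-register renamed buffers with their language server.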
3406 fn update_local_worktree_buffers(
3407 &mut self,
3408 worktree_handle: ModelHandle<Worktree>,
3409 cx: &mut ModelContext<Self>,
3410 ) {
3411 let snapshot = worktree_handle.read(cx).snapshot();
3412 let mut buffers_to_delete = Vec::new();
3413 let mut renamed_buffers = Vec::new();
3414 for (buffer_id, buffer) in &self.opened_buffers {
3415 if let Some(buffer) = buffer.upgrade(cx) {
3416 buffer.update(cx, |buffer, cx| {
3417 if let Some(old_file) = File::from_dyn(buffer.file()) {
3418 if old_file.worktree != worktree_handle {
3419 return;
3420 }
3421
3422 let new_file = if let Some(entry) = old_file
3423 .entry_id
3424 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3425 {
3426 File {
3427 is_local: true,
3428 entry_id: Some(entry.id),
3429 mtime: entry.mtime,
3430 path: entry.path.clone(),
3431 worktree: worktree_handle.clone(),
3432 }
3433 } else if let Some(entry) =
3434 snapshot.entry_for_path(old_file.path().as_ref())
3435 {
3436 File {
3437 is_local: true,
3438 entry_id: Some(entry.id),
3439 mtime: entry.mtime,
3440 path: entry.path.clone(),
3441 worktree: worktree_handle.clone(),
3442 }
3443 } else {
3444 File {
3445 is_local: true,
3446 entry_id: None,
3447 path: old_file.path().clone(),
3448 mtime: old_file.mtime(),
3449 worktree: worktree_handle.clone(),
3450 }
3451 };
3452
3453 let old_path = old_file.abs_path(cx);
3454 if new_file.abs_path(cx) != old_path {
3455 renamed_buffers.push((cx.handle(), old_path));
3456 }
3457
3458 if let Some(project_id) = self.remote_id() {
3459 self.client
3460 .send(proto::UpdateBufferFile {
3461 project_id,
3462 buffer_id: *buffer_id as u64,
3463 file: Some(new_file.to_proto()),
3464 })
3465 .log_err();
3466 }
3467 buffer.file_updated(Box::new(new_file), cx).detach();
3468 }
3469 });
3470 } else {
3471 buffers_to_delete.push(*buffer_id);
3472 }
3473 }
3474
3475 for buffer_id in buffers_to_delete {
3476 self.opened_buffers.remove(&buffer_id);
3477 }
3478
3479 for (buffer, old_path) in renamed_buffers {
3480 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3481 self.assign_language_to_buffer(&buffer, cx);
3482 self.register_buffer_with_language_server(&buffer, cx);
3483 }
3484 }
3485
3486 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3487 let new_active_entry = entry.and_then(|project_path| {
3488 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3489 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3490 Some(entry.id)
3491 });
3492 if new_active_entry != self.active_entry {
3493 self.active_entry = new_active_entry;
3494 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3495 }
3496 }
3497
3498 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3499 self.language_server_statuses
3500 .values()
3501 .any(|status| status.pending_diagnostic_updates > 0)
3502 }
3503
3504 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3505 let mut summary = DiagnosticSummary::default();
3506 for (_, path_summary) in self.diagnostic_summaries(cx) {
3507 summary.error_count += path_summary.error_count;
3508 summary.warning_count += path_summary.warning_count;
3509 summary.info_count += path_summary.info_count;
3510 summary.hint_count += path_summary.hint_count;
3511 }
3512 summary
3513 }
3514
3515 pub fn diagnostic_summaries<'a>(
3516 &'a self,
3517 cx: &'a AppContext,
3518 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3519 self.worktrees(cx).flat_map(move |worktree| {
3520 let worktree = worktree.read(cx);
3521 let worktree_id = worktree.id();
3522 worktree
3523 .diagnostic_summaries()
3524 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3525 })
3526 }
3527
3528 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3529 if self
3530 .language_server_statuses
3531 .values()
3532 .map(|status| status.pending_diagnostic_updates)
3533 .sum::<isize>()
3534 == 1
3535 {
3536 cx.emit(Event::DiskBasedDiagnosticsStarted);
3537 }
3538 }
3539
3540 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3541 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3542 if self
3543 .language_server_statuses
3544 .values()
3545 .map(|status| status.pending_diagnostic_updates)
3546 .sum::<isize>()
3547 == 0
3548 {
3549 cx.emit(Event::DiskBasedDiagnosticsFinished);
3550 }
3551 }
3552
3553 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3554 self.active_entry
3555 }
3556
3557 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3558 self.worktree_for_id(path.worktree_id, cx)?
3559 .read(cx)
3560 .entry_for_path(&path.path)
3561 .map(|entry| entry.id)
3562 }
3563
3564 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3565 let worktree = self.worktree_for_entry(entry_id, cx)?;
3566 let worktree = worktree.read(cx);
3567 let worktree_id = worktree.id();
3568 let path = worktree.entry_for_id(entry_id)?.path.clone();
3569 Some(ProjectPath { worktree_id, path })
3570 }
3571
3572 // RPC message handlers
3573
3574 async fn handle_unshare_project(
3575 this: ModelHandle<Self>,
3576 _: TypedEnvelope<proto::UnshareProject>,
3577 _: Arc<Client>,
3578 mut cx: AsyncAppContext,
3579 ) -> Result<()> {
3580 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3581 Ok(())
3582 }
3583
3584 async fn handle_add_collaborator(
3585 this: ModelHandle<Self>,
3586 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3587 _: Arc<Client>,
3588 mut cx: AsyncAppContext,
3589 ) -> Result<()> {
3590 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3591 let collaborator = envelope
3592 .payload
3593 .collaborator
3594 .take()
3595 .ok_or_else(|| anyhow!("empty collaborator"))?;
3596
3597 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3598 this.update(&mut cx, |this, cx| {
3599 this.collaborators
3600 .insert(collaborator.peer_id, collaborator);
3601 cx.notify();
3602 });
3603
3604 Ok(())
3605 }
3606
3607 async fn handle_remove_collaborator(
3608 this: ModelHandle<Self>,
3609 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3610 _: Arc<Client>,
3611 mut cx: AsyncAppContext,
3612 ) -> Result<()> {
3613 this.update(&mut cx, |this, cx| {
3614 let peer_id = PeerId(envelope.payload.peer_id);
3615 let replica_id = this
3616 .collaborators
3617 .remove(&peer_id)
3618 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3619 .replica_id;
3620 for (_, buffer) in &this.opened_buffers {
3621 if let Some(buffer) = buffer.upgrade(cx) {
3622 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3623 }
3624 }
3625 cx.emit(Event::CollaboratorLeft(peer_id));
3626 cx.notify();
3627 Ok(())
3628 })
3629 }
3630
3631 async fn handle_register_worktree(
3632 this: ModelHandle<Self>,
3633 envelope: TypedEnvelope<proto::RegisterWorktree>,
3634 client: Arc<Client>,
3635 mut cx: AsyncAppContext,
3636 ) -> Result<()> {
3637 this.update(&mut cx, |this, cx| {
3638 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3639 let replica_id = this.replica_id();
3640 let worktree = proto::Worktree {
3641 id: envelope.payload.worktree_id,
3642 root_name: envelope.payload.root_name,
3643 entries: Default::default(),
3644 diagnostic_summaries: Default::default(),
3645 visible: envelope.payload.visible,
3646 };
3647 let (worktree, load_task) =
3648 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3649 this.add_worktree(&worktree, cx);
3650 load_task.detach();
3651 Ok(())
3652 })
3653 }
3654
3655 async fn handle_unregister_worktree(
3656 this: ModelHandle<Self>,
3657 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3658 _: Arc<Client>,
3659 mut cx: AsyncAppContext,
3660 ) -> Result<()> {
3661 this.update(&mut cx, |this, cx| {
3662 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3663 this.remove_worktree(worktree_id, cx);
3664 Ok(())
3665 })
3666 }
3667
3668 async fn handle_update_worktree(
3669 this: ModelHandle<Self>,
3670 envelope: TypedEnvelope<proto::UpdateWorktree>,
3671 _: Arc<Client>,
3672 mut cx: AsyncAppContext,
3673 ) -> Result<()> {
3674 this.update(&mut cx, |this, cx| {
3675 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3676 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3677 worktree.update(cx, |worktree, _| {
3678 let worktree = worktree.as_remote_mut().unwrap();
3679 worktree.update_from_remote(envelope)
3680 })?;
3681 }
3682 Ok(())
3683 })
3684 }
3685
3686 async fn handle_update_diagnostic_summary(
3687 this: ModelHandle<Self>,
3688 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3689 _: Arc<Client>,
3690 mut cx: AsyncAppContext,
3691 ) -> Result<()> {
3692 this.update(&mut cx, |this, cx| {
3693 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3694 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3695 if let Some(summary) = envelope.payload.summary {
3696 let project_path = ProjectPath {
3697 worktree_id,
3698 path: Path::new(&summary.path).into(),
3699 };
3700 worktree.update(cx, |worktree, _| {
3701 worktree
3702 .as_remote_mut()
3703 .unwrap()
3704 .update_diagnostic_summary(project_path.path.clone(), &summary);
3705 });
3706 cx.emit(Event::DiagnosticsUpdated(project_path));
3707 }
3708 }
3709 Ok(())
3710 })
3711 }
3712
3713 async fn handle_start_language_server(
3714 this: ModelHandle<Self>,
3715 envelope: TypedEnvelope<proto::StartLanguageServer>,
3716 _: Arc<Client>,
3717 mut cx: AsyncAppContext,
3718 ) -> Result<()> {
3719 let server = envelope
3720 .payload
3721 .server
3722 .ok_or_else(|| anyhow!("invalid server"))?;
3723 this.update(&mut cx, |this, cx| {
3724 this.language_server_statuses.insert(
3725 server.id as usize,
3726 LanguageServerStatus {
3727 name: server.name,
3728 pending_work: Default::default(),
3729 pending_diagnostic_updates: 0,
3730 },
3731 );
3732 cx.notify();
3733 });
3734 Ok(())
3735 }
3736
3737 async fn handle_update_language_server(
3738 this: ModelHandle<Self>,
3739 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3740 _: Arc<Client>,
3741 mut cx: AsyncAppContext,
3742 ) -> Result<()> {
3743 let language_server_id = envelope.payload.language_server_id as usize;
3744 match envelope
3745 .payload
3746 .variant
3747 .ok_or_else(|| anyhow!("invalid variant"))?
3748 {
3749 proto::update_language_server::Variant::WorkStart(payload) => {
3750 this.update(&mut cx, |this, cx| {
3751 this.on_lsp_work_start(language_server_id, payload.token, cx);
3752 })
3753 }
3754 proto::update_language_server::Variant::WorkProgress(payload) => {
3755 this.update(&mut cx, |this, cx| {
3756 this.on_lsp_work_progress(
3757 language_server_id,
3758 payload.token,
3759 LanguageServerProgress {
3760 message: payload.message,
3761 percentage: payload.percentage.map(|p| p as usize),
3762 last_update_at: Instant::now(),
3763 },
3764 cx,
3765 );
3766 })
3767 }
3768 proto::update_language_server::Variant::WorkEnd(payload) => {
3769 this.update(&mut cx, |this, cx| {
3770 this.on_lsp_work_end(language_server_id, payload.token, cx);
3771 })
3772 }
3773 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3774 this.update(&mut cx, |this, cx| {
3775 this.disk_based_diagnostics_started(cx);
3776 })
3777 }
3778 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3779 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3780 }
3781 }
3782
3783 Ok(())
3784 }
3785
3786 async fn handle_update_buffer(
3787 this: ModelHandle<Self>,
3788 envelope: TypedEnvelope<proto::UpdateBuffer>,
3789 _: Arc<Client>,
3790 mut cx: AsyncAppContext,
3791 ) -> Result<()> {
3792 this.update(&mut cx, |this, cx| {
3793 let payload = envelope.payload.clone();
3794 let buffer_id = payload.buffer_id;
3795 let ops = payload
3796 .operations
3797 .into_iter()
3798 .map(language::proto::deserialize_operation)
3799 .collect::<Result<Vec<_>, _>>()?;
3800 match this.opened_buffers.entry(buffer_id) {
3801 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3802 OpenBuffer::Strong(buffer) => {
3803 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3804 }
3805 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3806 OpenBuffer::Weak(_) => {}
3807 },
3808 hash_map::Entry::Vacant(e) => {
3809 e.insert(OpenBuffer::Loading(ops));
3810 }
3811 }
3812 Ok(())
3813 })
3814 }
3815
3816 async fn handle_update_buffer_file(
3817 this: ModelHandle<Self>,
3818 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3819 _: Arc<Client>,
3820 mut cx: AsyncAppContext,
3821 ) -> Result<()> {
3822 this.update(&mut cx, |this, cx| {
3823 let payload = envelope.payload.clone();
3824 let buffer_id = payload.buffer_id;
3825 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3826 let worktree = this
3827 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3828 .ok_or_else(|| anyhow!("no such worktree"))?;
3829 let file = File::from_proto(file, worktree.clone(), cx)?;
3830 let buffer = this
3831 .opened_buffers
3832 .get_mut(&buffer_id)
3833 .and_then(|b| b.upgrade(cx))
3834 .ok_or_else(|| anyhow!("no such buffer"))?;
3835 buffer.update(cx, |buffer, cx| {
3836 buffer.file_updated(Box::new(file), cx).detach();
3837 });
3838 Ok(())
3839 })
3840 }
3841
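    // A remote peer asked us to save a buffer. Wait until the buffer has caught up
    // to the version the peer was looking at, save it, and report the resulting
    // version and mtime back.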
3842 async fn handle_save_buffer(
3843 this: ModelHandle<Self>,
3844 envelope: TypedEnvelope<proto::SaveBuffer>,
3845 _: Arc<Client>,
3846 mut cx: AsyncAppContext,
3847 ) -> Result<proto::BufferSaved> {
3848 let buffer_id = envelope.payload.buffer_id;
3849 let requested_version = deserialize_version(envelope.payload.version);
3850
3851 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3852 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3853 let buffer = this
3854 .opened_buffers
3855 .get(&buffer_id)
3856 .and_then(|buffer| buffer.upgrade(cx))
3857 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3858 Ok::<_, anyhow::Error>((project_id, buffer))
3859 })?;
3860 buffer
3861 .update(&mut cx, |buffer, _| {
3862 buffer.wait_for_version(requested_version)
3863 })
3864 .await;
3865
3866 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3867 Ok(proto::BufferSaved {
3868 project_id,
3869 buffer_id,
3870 version: serialize_version(&saved_version),
3871 mtime: Some(mtime.into()),
3872 })
3873 }
3874
3875 async fn handle_reload_buffers(
3876 this: ModelHandle<Self>,
3877 envelope: TypedEnvelope<proto::ReloadBuffers>,
3878 _: Arc<Client>,
3879 mut cx: AsyncAppContext,
3880 ) -> Result<proto::ReloadBuffersResponse> {
3881 let sender_id = envelope.original_sender_id()?;
3882 let reload = this.update(&mut cx, |this, cx| {
3883 let mut buffers = HashSet::default();
3884 for buffer_id in &envelope.payload.buffer_ids {
3885 buffers.insert(
3886 this.opened_buffers
3887 .get(buffer_id)
3888 .and_then(|buffer| buffer.upgrade(cx))
3889 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3890 );
3891 }
3892 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3893 })?;
3894
3895 let project_transaction = reload.await?;
3896 let project_transaction = this.update(&mut cx, |this, cx| {
3897 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3898 });
3899 Ok(proto::ReloadBuffersResponse {
3900 transaction: Some(project_transaction),
3901 })
3902 }
3903
3904 async fn handle_format_buffers(
3905 this: ModelHandle<Self>,
3906 envelope: TypedEnvelope<proto::FormatBuffers>,
3907 _: Arc<Client>,
3908 mut cx: AsyncAppContext,
3909 ) -> Result<proto::FormatBuffersResponse> {
3910 let sender_id = envelope.original_sender_id()?;
3911 let format = this.update(&mut cx, |this, cx| {
3912 let mut buffers = HashSet::default();
3913 for buffer_id in &envelope.payload.buffer_ids {
3914 buffers.insert(
3915 this.opened_buffers
3916 .get(buffer_id)
3917 .and_then(|buffer| buffer.upgrade(cx))
3918 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3919 );
3920 }
3921 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3922 })?;
3923
3924 let project_transaction = format.await?;
3925 let project_transaction = this.update(&mut cx, |this, cx| {
3926 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3927 });
3928 Ok(proto::FormatBuffersResponse {
3929 transaction: Some(project_transaction),
3930 })
3931 }
3932
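    // Completion requests carry the buffer version the requester saw. Wait for the
    // buffer to reach that version, compute completions at the given anchor, and
    // return them along with the version they were computed against.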
3933 async fn handle_get_completions(
3934 this: ModelHandle<Self>,
3935 envelope: TypedEnvelope<proto::GetCompletions>,
3936 _: Arc<Client>,
3937 mut cx: AsyncAppContext,
3938 ) -> Result<proto::GetCompletionsResponse> {
3939 let position = envelope
3940 .payload
3941 .position
3942 .and_then(language::proto::deserialize_anchor)
3943 .ok_or_else(|| anyhow!("invalid position"))?;
3944 let version = deserialize_version(envelope.payload.version);
3945 let buffer = this.read_with(&cx, |this, cx| {
3946 this.opened_buffers
3947 .get(&envelope.payload.buffer_id)
3948 .and_then(|buffer| buffer.upgrade(cx))
3949 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3950 })?;
3951 buffer
3952 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3953 .await;
3954 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3955 let completions = this
3956 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3957 .await?;
3958
3959 Ok(proto::GetCompletionsResponse {
3960 completions: completions
3961 .iter()
3962 .map(language::proto::serialize_completion)
3963 .collect(),
3964 version: serialize_version(&version),
3965 })
3966 }
3967
3968 async fn handle_apply_additional_edits_for_completion(
3969 this: ModelHandle<Self>,
3970 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3971 _: Arc<Client>,
3972 mut cx: AsyncAppContext,
3973 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3974 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3975 let buffer = this
3976 .opened_buffers
3977 .get(&envelope.payload.buffer_id)
3978 .and_then(|buffer| buffer.upgrade(cx))
3979 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3980 let language = buffer.read(cx).language();
3981 let completion = language::proto::deserialize_completion(
3982 envelope
3983 .payload
3984 .completion
3985 .ok_or_else(|| anyhow!("invalid completion"))?,
3986 language,
3987 )?;
3988 Ok::<_, anyhow::Error>(
3989 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3990 )
3991 })?;
3992
3993 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3994 transaction: apply_additional_edits
3995 .await?
3996 .as_ref()
3997 .map(language::proto::serialize_transaction),
3998 })
3999 }
4000
4001 async fn handle_get_code_actions(
4002 this: ModelHandle<Self>,
4003 envelope: TypedEnvelope<proto::GetCodeActions>,
4004 _: Arc<Client>,
4005 mut cx: AsyncAppContext,
4006 ) -> Result<proto::GetCodeActionsResponse> {
4007 let start = envelope
4008 .payload
4009 .start
4010 .and_then(language::proto::deserialize_anchor)
4011 .ok_or_else(|| anyhow!("invalid start"))?;
4012 let end = envelope
4013 .payload
4014 .end
4015 .and_then(language::proto::deserialize_anchor)
4016 .ok_or_else(|| anyhow!("invalid end"))?;
4017 let buffer = this.update(&mut cx, |this, cx| {
4018 this.opened_buffers
4019 .get(&envelope.payload.buffer_id)
4020 .and_then(|buffer| buffer.upgrade(cx))
4021 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4022 })?;
4023 buffer
4024 .update(&mut cx, |buffer, _| {
4025 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4026 })
4027 .await;
4028
4029 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4030 let code_actions = this.update(&mut cx, |this, cx| {
4031 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4032 })?;
4033
4034 Ok(proto::GetCodeActionsResponse {
4035 actions: code_actions
4036 .await?
4037 .iter()
4038 .map(language::proto::serialize_code_action)
4039 .collect(),
4040 version: serialize_version(&version),
4041 })
4042 }
4043
4044 async fn handle_apply_code_action(
4045 this: ModelHandle<Self>,
4046 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4047 _: Arc<Client>,
4048 mut cx: AsyncAppContext,
4049 ) -> Result<proto::ApplyCodeActionResponse> {
4050 let sender_id = envelope.original_sender_id()?;
4051 let action = language::proto::deserialize_code_action(
4052 envelope
4053 .payload
4054 .action
4055 .ok_or_else(|| anyhow!("invalid action"))?,
4056 )?;
4057 let apply_code_action = this.update(&mut cx, |this, cx| {
4058 let buffer = this
4059 .opened_buffers
4060 .get(&envelope.payload.buffer_id)
4061 .and_then(|buffer| buffer.upgrade(cx))
4062 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4063 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4064 })?;
4065
4066 let project_transaction = apply_code_action.await?;
4067 let project_transaction = this.update(&mut cx, |this, cx| {
4068 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4069 });
4070 Ok(proto::ApplyCodeActionResponse {
4071 transaction: Some(project_transaction),
4072 })
4073 }
4074
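    // Generic handler for LSP-backed requests forwarded by collaborators: look up
    // the target buffer, rebuild the typed request from the proto payload, run it
    // through `request_lsp`, and serialize the response for the original sender.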
4075 async fn handle_lsp_command<T: LspCommand>(
4076 this: ModelHandle<Self>,
4077 envelope: TypedEnvelope<T::ProtoRequest>,
4078 _: Arc<Client>,
4079 mut cx: AsyncAppContext,
4080 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4081 where
4082 <T::LspRequest as lsp::request::Request>::Result: Send,
4083 {
4084 let sender_id = envelope.original_sender_id()?;
4085 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4086 let buffer_handle = this.read_with(&cx, |this, _| {
4087 this.opened_buffers
4088 .get(&buffer_id)
4089 .and_then(|buffer| buffer.upgrade(&cx))
4090 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4091 })?;
4092 let request = T::from_proto(
4093 envelope.payload,
4094 this.clone(),
4095 buffer_handle.clone(),
4096 cx.clone(),
4097 )
4098 .await?;
4099 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4100 let response = this
4101 .update(&mut cx, |this, cx| {
4102 this.request_lsp(buffer_handle, request, cx)
4103 })
4104 .await?;
4105 this.update(&mut cx, |this, cx| {
4106 Ok(T::response_to_proto(
4107 response,
4108 this,
4109 sender_id,
4110 &buffer_version,
4111 cx,
4112 ))
4113 })
4114 }
4115
4116 async fn handle_get_project_symbols(
4117 this: ModelHandle<Self>,
4118 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4119 _: Arc<Client>,
4120 mut cx: AsyncAppContext,
4121 ) -> Result<proto::GetProjectSymbolsResponse> {
4122 let symbols = this
4123 .update(&mut cx, |this, cx| {
4124 this.symbols(&envelope.payload.query, cx)
4125 })
4126 .await?;
4127
4128 Ok(proto::GetProjectSymbolsResponse {
4129 symbols: symbols.iter().map(serialize_symbol).collect(),
4130 })
4131 }
4132
4133 async fn handle_search_project(
4134 this: ModelHandle<Self>,
4135 envelope: TypedEnvelope<proto::SearchProject>,
4136 _: Arc<Client>,
4137 mut cx: AsyncAppContext,
4138 ) -> Result<proto::SearchProjectResponse> {
4139 let peer_id = envelope.original_sender_id()?;
4140 let query = SearchQuery::from_proto(envelope.payload)?;
4141 let result = this
4142 .update(&mut cx, |this, cx| this.search(query, cx))
4143 .await?;
4144
4145 this.update(&mut cx, |this, cx| {
4146 let mut locations = Vec::new();
4147 for (buffer, ranges) in result {
4148 for range in ranges {
4149 let start = serialize_anchor(&range.start);
4150 let end = serialize_anchor(&range.end);
4151 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4152 locations.push(proto::Location {
4153 buffer: Some(buffer),
4154 start: Some(start),
4155 end: Some(end),
4156 });
4157 }
4158 }
4159 Ok(proto::SearchProjectResponse { locations })
4160 })
4161 }
4162
4163 async fn handle_open_buffer_for_symbol(
4164 this: ModelHandle<Self>,
4165 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4166 _: Arc<Client>,
4167 mut cx: AsyncAppContext,
4168 ) -> Result<proto::OpenBufferForSymbolResponse> {
4169 let peer_id = envelope.original_sender_id()?;
4170 let symbol = envelope
4171 .payload
4172 .symbol
4173 .ok_or_else(|| anyhow!("invalid symbol"))?;
4174 let symbol = this.read_with(&cx, |this, _| {
4175 let symbol = this.deserialize_symbol(symbol)?;
4176 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4177 if signature == symbol.signature {
4178 Ok(symbol)
4179 } else {
4180 Err(anyhow!("invalid symbol signature"))
4181 }
4182 })?;
4183 let buffer = this
4184 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4185 .await?;
4186
4187 Ok(proto::OpenBufferForSymbolResponse {
4188 buffer: Some(this.update(&mut cx, |this, cx| {
4189 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4190 })),
4191 })
4192 }
4193
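    // Symbols are signed with a SHA-256 over the worktree id, the path, and this
    // project's private nonce, so `handle_open_buffer_for_symbol` can verify that a
    // symbol echoed back by a peer actually originated from this project.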
4194 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4195 let mut hasher = Sha256::new();
4196 hasher.update(worktree_id.to_proto().to_be_bytes());
4197 hasher.update(path.to_string_lossy().as_bytes());
4198 hasher.update(self.nonce.to_be_bytes());
4199 hasher.finalize().as_slice().try_into().unwrap()
4200 }
4201
4202 async fn handle_open_buffer_by_id(
4203 this: ModelHandle<Self>,
4204 envelope: TypedEnvelope<proto::OpenBufferById>,
4205 _: Arc<Client>,
4206 mut cx: AsyncAppContext,
4207 ) -> Result<proto::OpenBufferResponse> {
4208 let peer_id = envelope.original_sender_id()?;
4209 let buffer = this
4210 .update(&mut cx, |this, cx| {
4211 this.open_buffer_by_id(envelope.payload.id, cx)
4212 })
4213 .await?;
4214 this.update(&mut cx, |this, cx| {
4215 Ok(proto::OpenBufferResponse {
4216 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4217 })
4218 })
4219 }
4220
4221 async fn handle_open_buffer_by_path(
4222 this: ModelHandle<Self>,
4223 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4224 _: Arc<Client>,
4225 mut cx: AsyncAppContext,
4226 ) -> Result<proto::OpenBufferResponse> {
4227 let peer_id = envelope.original_sender_id()?;
4228 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4229 let open_buffer = this.update(&mut cx, |this, cx| {
4230 this.open_buffer(
4231 ProjectPath {
4232 worktree_id,
4233 path: PathBuf::from(envelope.payload.path).into(),
4234 },
4235 cx,
4236 )
4237 });
4238
4239 let buffer = open_buffer.await?;
4240 this.update(&mut cx, |this, cx| {
4241 Ok(proto::OpenBufferResponse {
4242 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4243 })
4244 })
4245 }
4246
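    // Convert a `ProjectTransaction` into its wire representation, pairing each
    // affected buffer (sent as full state or just an id, depending on whether the
    // peer already has it) with its serialized transaction.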
4247 fn serialize_project_transaction_for_peer(
4248 &mut self,
4249 project_transaction: ProjectTransaction,
4250 peer_id: PeerId,
4251 cx: &AppContext,
4252 ) -> proto::ProjectTransaction {
4253 let mut serialized_transaction = proto::ProjectTransaction {
4254 buffers: Default::default(),
4255 transactions: Default::default(),
4256 };
4257 for (buffer, transaction) in project_transaction.0 {
4258 serialized_transaction
4259 .buffers
4260 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4261 serialized_transaction
4262 .transactions
4263 .push(language::proto::serialize_transaction(&transaction));
4264 }
4265 serialized_transaction
4266 }
4267
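    // The inverse of `serialize_project_transaction_for_peer`: resolve each buffer,
    // wait for the edits referenced by its transaction to arrive, and optionally
    // push the transaction onto the buffer's undo history.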
4268 fn deserialize_project_transaction(
4269 &mut self,
4270 message: proto::ProjectTransaction,
4271 push_to_history: bool,
4272 cx: &mut ModelContext<Self>,
4273 ) -> Task<Result<ProjectTransaction>> {
4274 cx.spawn(|this, mut cx| async move {
4275 let mut project_transaction = ProjectTransaction::default();
4276 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4277 let buffer = this
4278 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4279 .await?;
4280 let transaction = language::proto::deserialize_transaction(transaction)?;
4281 project_transaction.0.insert(buffer, transaction);
4282 }
4283
4284 for (buffer, transaction) in &project_transaction.0 {
4285 buffer
4286 .update(&mut cx, |buffer, _| {
4287 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4288 })
4289 .await;
4290
4291 if push_to_history {
4292 buffer.update(&mut cx, |buffer, _| {
4293 buffer.push_transaction(transaction.clone(), Instant::now());
4294 });
4295 }
4296 }
4297
4298 Ok(project_transaction)
4299 })
4300 }
4301
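    // The first time a buffer is sent to a given peer, serialize its full state;
    // on subsequent sends only its id is transmitted, since the peer keeps its
    // replica up to date via `UpdateBuffer` messages.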
4302 fn serialize_buffer_for_peer(
4303 &mut self,
4304 buffer: &ModelHandle<Buffer>,
4305 peer_id: PeerId,
4306 cx: &AppContext,
4307 ) -> proto::Buffer {
4308 let buffer_id = buffer.read(cx).remote_id();
4309 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4310 if shared_buffers.insert(buffer_id) {
4311 proto::Buffer {
4312 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4313 }
4314 } else {
4315 proto::Buffer {
4316 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4317 }
4318 }
4319 }
4320
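    // Resolve a `proto::Buffer` received from a peer. An `Id` variant waits until a
    // buffer with that id has been opened locally, while a `State` variant builds a
    // new buffer replica from the serialized state and registers it with the project.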
4321 fn deserialize_buffer(
4322 &mut self,
4323 buffer: proto::Buffer,
4324 cx: &mut ModelContext<Self>,
4325 ) -> Task<Result<ModelHandle<Buffer>>> {
4326 let replica_id = self.replica_id();
4327
4328 let opened_buffer_tx = self.opened_buffer.0.clone();
4329 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4330 cx.spawn(|this, mut cx| async move {
4331 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4332 proto::buffer::Variant::Id(id) => {
4333 let buffer = loop {
4334 let buffer = this.read_with(&cx, |this, cx| {
4335 this.opened_buffers
4336 .get(&id)
4337 .and_then(|buffer| buffer.upgrade(cx))
4338 });
4339 if let Some(buffer) = buffer {
4340 break buffer;
4341 }
4342 opened_buffer_rx
4343 .next()
4344 .await
4345 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4346 };
4347 Ok(buffer)
4348 }
4349 proto::buffer::Variant::State(mut buffer) => {
4350 let mut buffer_worktree = None;
4351 let mut buffer_file = None;
4352 if let Some(file) = buffer.file.take() {
4353 this.read_with(&cx, |this, cx| {
4354 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4355 let worktree =
4356 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4357 anyhow!("no worktree found for id {}", file.worktree_id)
4358 })?;
4359 buffer_file =
4360 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4361 as Box<dyn language::File>);
4362 buffer_worktree = Some(worktree);
4363 Ok::<_, anyhow::Error>(())
4364 })?;
4365 }
4366
4367 let buffer = cx.add_model(|cx| {
4368 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4369 });
4370
4371 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4372
4373 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4374 Ok(buffer)
4375 }
4376 }
4377 })
4378 }
4379
4380 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4381 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4382 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4383 let start = serialized_symbol
4384 .start
4385 .ok_or_else(|| anyhow!("invalid start"))?;
4386 let end = serialized_symbol
4387 .end
4388 .ok_or_else(|| anyhow!("invalid end"))?;
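        // The symbol kind travels over the wire as a raw integer; transmuting it
        // back into an `lsp::SymbolKind` assumes the peer produced the value with
        // `serialize_symbol` below.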
4389 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4390 let path = PathBuf::from(serialized_symbol.path);
4391 let language = self.languages.select_language(&path);
4392 Ok(Symbol {
4393 source_worktree_id,
4394 worktree_id,
4395 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4396 label: language
4397 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4398 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4399 name: serialized_symbol.name,
4400 path,
4401 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4402 kind,
4403 signature: serialized_symbol
4404 .signature
4405 .try_into()
4406 .map_err(|_| anyhow!("invalid signature"))?,
4407 })
4408 }
4409
4410 async fn handle_buffer_saved(
4411 this: ModelHandle<Self>,
4412 envelope: TypedEnvelope<proto::BufferSaved>,
4413 _: Arc<Client>,
4414 mut cx: AsyncAppContext,
4415 ) -> Result<()> {
4416 let version = deserialize_version(envelope.payload.version);
4417 let mtime = envelope
4418 .payload
4419 .mtime
4420 .ok_or_else(|| anyhow!("missing mtime"))?
4421 .into();
4422
4423 this.update(&mut cx, |this, cx| {
4424 let buffer = this
4425 .opened_buffers
4426 .get(&envelope.payload.buffer_id)
4427 .and_then(|buffer| buffer.upgrade(cx));
4428 if let Some(buffer) = buffer {
4429 buffer.update(cx, |buffer, cx| {
4430 buffer.did_save(version, mtime, None, cx);
4431 });
4432 }
4433 Ok(())
4434 })
4435 }
4436
4437 async fn handle_buffer_reloaded(
4438 this: ModelHandle<Self>,
4439 envelope: TypedEnvelope<proto::BufferReloaded>,
4440 _: Arc<Client>,
4441 mut cx: AsyncAppContext,
4442 ) -> Result<()> {
4443 let payload = envelope.payload.clone();
4444 let version = deserialize_version(payload.version);
4445 let mtime = payload
4446 .mtime
4447 .ok_or_else(|| anyhow!("missing mtime"))?
4448 .into();
4449 this.update(&mut cx, |this, cx| {
4450 let buffer = this
4451 .opened_buffers
4452 .get(&payload.buffer_id)
4453 .and_then(|buffer| buffer.upgrade(cx));
4454 if let Some(buffer) = buffer {
4455 buffer.update(cx, |buffer, cx| {
4456 buffer.did_reload(version, mtime, cx);
4457 });
4458 }
4459 Ok(())
4460 })
4461 }
4462
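    // Fuzzy-match `query` against the paths of all visible worktrees. The matching
    // itself runs on the background executor via `fuzzy::match_paths`.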
4463 pub fn match_paths<'a>(
4464 &self,
4465 query: &'a str,
4466 include_ignored: bool,
4467 smart_case: bool,
4468 max_results: usize,
4469 cancel_flag: &'a AtomicBool,
4470 cx: &AppContext,
4471 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4472 let worktrees = self
4473 .worktrees(cx)
4474 .filter(|worktree| worktree.read(cx).is_visible())
4475 .collect::<Vec<_>>();
4476 let include_root_name = worktrees.len() > 1;
4477 let candidate_sets = worktrees
4478 .into_iter()
4479 .map(|worktree| CandidateSet {
4480 snapshot: worktree.read(cx).snapshot(),
4481 include_ignored,
4482 include_root_name,
4483 })
4484 .collect::<Vec<_>>();
4485
4486 let background = cx.background().clone();
4487 async move {
4488 fuzzy::match_paths(
4489 candidate_sets.as_slice(),
4490 query,
4491 smart_case,
4492 max_results,
4493 cancel_flag,
4494 background,
4495 )
4496 .await
4497 }
4498 }
4499
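    // Convert a batch of LSP text edits into anchor-based edits against the buffer
    // snapshot for the document version the server reported, merging adjacent edits
    // and diffing multi-line replacements so anchors in unchanged regions survive.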
4500 fn edits_from_lsp(
4501 &mut self,
4502 buffer: &ModelHandle<Buffer>,
4503 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4504 version: Option<i32>,
4505 cx: &mut ModelContext<Self>,
4506 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4507 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4508 cx.background().spawn(async move {
4509 let snapshot = snapshot?;
4510 let mut lsp_edits = lsp_edits
4511 .into_iter()
4512 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4513 .peekable();
4514
4515 let mut edits = Vec::new();
4516 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4517 // Combine any LSP edits that are adjacent.
4518 //
4519 // Also, combine LSP edits that are separated from each other by only
4520 // a newline. This is important because for some code actions,
4521 // Rust-analyzer rewrites the entire buffer via a series of edits that
4522 // are separated by unchanged newline characters.
4523 //
4524 // In order for the diffing logic below to work properly, any edits that
4525 // cancel each other out must be combined into one.
4526 while let Some((next_range, next_text)) = lsp_edits.peek() {
4527 if next_range.start > range.end {
4528 if next_range.start.row > range.end.row + 1
4529 || next_range.start.column > 0
4530 || snapshot.clip_point_utf16(
4531 PointUtf16::new(range.end.row, u32::MAX),
4532 Bias::Left,
4533 ) > range.end
4534 {
4535 break;
4536 }
4537 new_text.push('\n');
4538 }
4539 range.end = next_range.end;
4540 new_text.push_str(&next_text);
4541 lsp_edits.next();
4542 }
4543
4544 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4545 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4546 {
4547 return Err(anyhow!("invalid edits received from language server"));
4548 }
4549
4550 // For multiline edits, perform a diff of the old and new text so that
4551 // we can identify the changes more precisely, preserving the locations
4552 // of any anchors positioned in the unchanged regions.
4553 if range.end.row > range.start.row {
4554 let mut offset = range.start.to_offset(&snapshot);
4555 let old_text = snapshot.text_for_range(range).collect::<String>();
4556
4557 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4558 let mut moved_since_edit = true;
4559 for change in diff.iter_all_changes() {
4560 let tag = change.tag();
4561 let value = change.value();
4562 match tag {
4563 ChangeTag::Equal => {
4564 offset += value.len();
4565 moved_since_edit = true;
4566 }
4567 ChangeTag::Delete => {
4568 let start = snapshot.anchor_after(offset);
4569 let end = snapshot.anchor_before(offset + value.len());
4570 if moved_since_edit {
4571 edits.push((start..end, String::new()));
4572 } else {
4573 edits.last_mut().unwrap().0.end = end;
4574 }
4575 offset += value.len();
4576 moved_since_edit = false;
4577 }
4578 ChangeTag::Insert => {
4579 if moved_since_edit {
4580 let anchor = snapshot.anchor_after(offset);
4581 edits.push((anchor.clone()..anchor, value.to_string()));
4582 } else {
4583 edits.last_mut().unwrap().1.push_str(value);
4584 }
4585 moved_since_edit = false;
4586 }
4587 }
4588 }
4589 } else if range.end == range.start {
4590 let anchor = snapshot.anchor_after(range.start);
4591 edits.push((anchor.clone()..anchor, new_text));
4592 } else {
4593 let edit_start = snapshot.anchor_after(range.start);
4594 let edit_end = snapshot.anchor_before(range.end);
4595 edits.push((edit_start..edit_end, new_text));
4596 }
4597 }
4598
4599 Ok(edits)
4600 })
4601 }
4602
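    // Look up the text snapshot corresponding to an LSP document version, pruning
    // snapshots that are more than `OLD_VERSIONS_TO_RETAIN` versions older than the
    // requested one. A `None` version means the buffer's current text.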
4603 fn buffer_snapshot_for_lsp_version(
4604 &mut self,
4605 buffer: &ModelHandle<Buffer>,
4606 version: Option<i32>,
4607 cx: &AppContext,
4608 ) -> Result<TextBufferSnapshot> {
4609 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4610
4611 if let Some(version) = version {
4612 let buffer_id = buffer.read(cx).remote_id();
4613 let snapshots = self
4614 .buffer_snapshots
4615 .get_mut(&buffer_id)
4616 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4617 let mut found_snapshot = None;
4618 snapshots.retain(|(snapshot_version, snapshot)| {
4619 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4620 false
4621 } else {
4622 if *snapshot_version == version {
4623 found_snapshot = Some(snapshot.clone());
4624 }
4625 true
4626 }
4627 });
4628
4629 found_snapshot.ok_or_else(|| {
4630 anyhow!(
4631 "snapshot not found for buffer {} at version {}",
4632 buffer_id,
4633 version
4634 )
4635 })
4636 } else {
            Ok(buffer.read(cx).text_snapshot())
4638 }
4639 }
4640
4641 fn language_server_for_buffer(
4642 &self,
4643 buffer: &Buffer,
4644 cx: &AppContext,
4645 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4646 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4647 let worktree_id = file.worktree_id(cx);
4648 self.language_servers
4649 .get(&(worktree_id, language.lsp_adapter()?.name()))
4650 } else {
4651 None
4652 }
4653 }
4654}
4655
4656impl WorktreeHandle {
4657 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4658 match self {
4659 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4660 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4661 }
4662 }
4663}
4664
4665impl OpenBuffer {
4666 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4667 match self {
4668 OpenBuffer::Strong(handle) => Some(handle.clone()),
4669 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4670 OpenBuffer::Loading(_) => None,
4671 }
4672 }
4673}
4674
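// Adapts a worktree snapshot into a `PathMatchCandidateSet` so the fuzzy matcher
// can iterate over the worktree's file paths.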
4675struct CandidateSet {
4676 snapshot: Snapshot,
4677 include_ignored: bool,
4678 include_root_name: bool,
4679}
4680
4681impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4682 type Candidates = CandidateSetIter<'a>;
4683
4684 fn id(&self) -> usize {
4685 self.snapshot.id().to_usize()
4686 }
4687
4688 fn len(&self) -> usize {
4689 if self.include_ignored {
4690 self.snapshot.file_count()
4691 } else {
4692 self.snapshot.visible_file_count()
4693 }
4694 }
4695
4696 fn prefix(&self) -> Arc<str> {
4697 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4698 self.snapshot.root_name().into()
4699 } else if self.include_root_name {
4700 format!("{}/", self.snapshot.root_name()).into()
4701 } else {
4702 "".into()
4703 }
4704 }
4705
4706 fn candidates(&'a self, start: usize) -> Self::Candidates {
4707 CandidateSetIter {
4708 traversal: self.snapshot.files(self.include_ignored, start),
4709 }
4710 }
4711}
4712
4713struct CandidateSetIter<'a> {
4714 traversal: Traversal<'a>,
4715}
4716
4717impl<'a> Iterator for CandidateSetIter<'a> {
4718 type Item = PathMatchCandidate<'a>;
4719
4720 fn next(&mut self) -> Option<Self::Item> {
4721 self.traversal.next().map(|entry| {
4722 if let EntryKind::File(char_bag) = entry.kind {
4723 PathMatchCandidate {
4724 path: &entry.path,
4725 char_bag,
4726 }
4727 } else {
4728 unreachable!()
4729 }
4730 })
4731 }
4732}
4733
4734impl Entity for Project {
4735 type Event = Event;
4736
4737 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4738 match &self.client_state {
4739 ProjectClientState::Local { remote_id_rx, .. } => {
4740 if let Some(project_id) = *remote_id_rx.borrow() {
4741 self.client
4742 .send(proto::UnregisterProject { project_id })
4743 .log_err();
4744 }
4745 }
4746 ProjectClientState::Remote { remote_id, .. } => {
4747 self.client
4748 .send(proto::LeaveProject {
4749 project_id: *remote_id,
4750 })
4751 .log_err();
4752 }
4753 }
4754 }
4755
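    // Before the application quits, ask every running language server to shut down
    // and wait for all of the shutdown futures to complete.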
4756 fn app_will_quit(
4757 &mut self,
4758 _: &mut MutableAppContext,
4759 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4760 let shutdown_futures = self
4761 .language_servers
4762 .drain()
4763 .filter_map(|(_, (_, server))| server.shutdown())
4764 .collect::<Vec<_>>();
4765 Some(
4766 async move {
4767 futures::future::join_all(shutdown_futures).await;
4768 }
4769 .boxed(),
4770 )
4771 }
4772}
4773
4774impl Collaborator {
4775 fn from_proto(
4776 message: proto::Collaborator,
4777 user_store: &ModelHandle<UserStore>,
4778 cx: &mut AsyncAppContext,
4779 ) -> impl Future<Output = Result<Self>> {
4780 let user = user_store.update(cx, |user_store, cx| {
4781 user_store.fetch_user(message.user_id, cx)
4782 });
4783
4784 async move {
4785 Ok(Self {
4786 peer_id: PeerId(message.peer_id),
4787 user: user.await?,
4788 replica_id: message.replica_id as ReplicaId,
4789 })
4790 }
4791 }
4792}
4793
4794impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4795 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4796 Self {
4797 worktree_id,
4798 path: path.as_ref().into(),
4799 }
4800 }
4801}
4802
4803impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4804 fn from(options: lsp::CreateFileOptions) -> Self {
4805 Self {
4806 overwrite: options.overwrite.unwrap_or(false),
4807 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4808 }
4809 }
4810}
4811
4812impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4813 fn from(options: lsp::RenameFileOptions) -> Self {
4814 Self {
4815 overwrite: options.overwrite.unwrap_or(false),
4816 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4817 }
4818 }
4819}
4820
4821impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4822 fn from(options: lsp::DeleteFileOptions) -> Self {
4823 Self {
4824 recursive: options.recursive.unwrap_or(false),
4825 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4826 }
4827 }
4828}
4829
4830fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4831 proto::Symbol {
4832 source_worktree_id: symbol.source_worktree_id.to_proto(),
4833 worktree_id: symbol.worktree_id.to_proto(),
4834 language_server_name: symbol.language_server_name.0.to_string(),
4835 name: symbol.name.clone(),
4836 kind: unsafe { mem::transmute(symbol.kind) },
4837 path: symbol.path.to_string_lossy().to_string(),
4838 start: Some(proto::Point {
4839 row: symbol.range.start.row,
4840 column: symbol.range.start.column,
4841 }),
4842 end: Some(proto::Point {
4843 row: symbol.range.end.row,
4844 column: symbol.range.end.column,
4845 }),
4846 signature: symbol.signature.to_vec(),
4847 }
4848}
4849
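// Compute a path to `path` relative to `base`, emitting a `..` component for each
// remaining component of `base` once the two paths diverge.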
4850fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4851 let mut path_components = path.components();
4852 let mut base_components = base.components();
4853 let mut components: Vec<Component> = Vec::new();
4854 loop {
4855 match (path_components.next(), base_components.next()) {
4856 (None, None) => break,
4857 (Some(a), None) => {
4858 components.push(a);
4859 components.extend(path_components.by_ref());
4860 break;
4861 }
4862 (None, _) => components.push(Component::ParentDir),
4863 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4864 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4865 (Some(a), Some(_)) => {
4866 components.push(Component::ParentDir);
4867 for _ in base_components {
4868 components.push(Component::ParentDir);
4869 }
4870 components.push(a);
4871 components.extend(path_components.by_ref());
4872 break;
4873 }
4874 }
4875 }
4876 components.iter().map(|c| c.as_os_str()).collect()
4877}
4878
4879impl Item for Buffer {
4880 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4881 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4882 }
4883}
4884
4885#[cfg(test)]
4886mod tests {
4887 use super::{Event, *};
4888 use fs::RealFs;
4889 use futures::{future, StreamExt};
4890 use gpui::test::subscribe;
4891 use language::{
4892 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4893 ToPoint,
4894 };
4895 use lsp::Url;
4896 use serde_json::json;
4897 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4898 use unindent::Unindent as _;
4899 use util::{assert_set_eq, test::temp_tree};
4900 use worktree::WorktreeHandle as _;
4901
4902 #[gpui::test]
4903 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4904 let dir = temp_tree(json!({
4905 "root": {
4906 "apple": "",
4907 "banana": {
4908 "carrot": {
4909 "date": "",
4910 "endive": "",
4911 }
4912 },
4913 "fennel": {
4914 "grape": "",
4915 }
4916 }
4917 }));
4918
4919 let root_link_path = dir.path().join("root_link");
4920 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4921 unix::fs::symlink(
4922 &dir.path().join("root/fennel"),
4923 &dir.path().join("root/finnochio"),
4924 )
4925 .unwrap();
4926
4927 let project = Project::test(Arc::new(RealFs), cx);
4928
4929 let (tree, _) = project
4930 .update(cx, |project, cx| {
4931 project.find_or_create_local_worktree(&root_link_path, true, cx)
4932 })
4933 .await
4934 .unwrap();
4935
4936 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4937 .await;
4938 cx.read(|cx| {
4939 let tree = tree.read(cx);
4940 assert_eq!(tree.file_count(), 5);
4941 assert_eq!(
4942 tree.inode_for_path("fennel/grape"),
4943 tree.inode_for_path("finnochio/grape")
4944 );
4945 });
4946
4947 let cancel_flag = Default::default();
4948 let results = project
4949 .read_with(cx, |project, cx| {
4950 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4951 })
4952 .await;
4953 assert_eq!(
4954 results
4955 .into_iter()
4956 .map(|result| result.path)
4957 .collect::<Vec<Arc<Path>>>(),
4958 vec![
4959 PathBuf::from("banana/carrot/date").into(),
4960 PathBuf::from("banana/carrot/endive").into(),
4961 ]
4962 );
4963 }
4964
4965 #[gpui::test]
4966 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4967 cx.foreground().forbid_parking();
4968
4969 let mut rust_language = Language::new(
4970 LanguageConfig {
4971 name: "Rust".into(),
4972 path_suffixes: vec!["rs".to_string()],
4973 ..Default::default()
4974 },
4975 Some(tree_sitter_rust::language()),
4976 );
4977 let mut json_language = Language::new(
4978 LanguageConfig {
4979 name: "JSON".into(),
4980 path_suffixes: vec!["json".to_string()],
4981 ..Default::default()
4982 },
4983 None,
4984 );
4985 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4986 name: "the-rust-language-server",
4987 capabilities: lsp::ServerCapabilities {
4988 completion_provider: Some(lsp::CompletionOptions {
4989 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4990 ..Default::default()
4991 }),
4992 ..Default::default()
4993 },
4994 ..Default::default()
4995 });
4996 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4997 name: "the-json-language-server",
4998 capabilities: lsp::ServerCapabilities {
4999 completion_provider: Some(lsp::CompletionOptions {
5000 trigger_characters: Some(vec![":".to_string()]),
5001 ..Default::default()
5002 }),
5003 ..Default::default()
5004 },
5005 ..Default::default()
5006 });
5007
5008 let fs = FakeFs::new(cx.background());
5009 fs.insert_tree(
5010 "/the-root",
5011 json!({
5012 "test.rs": "const A: i32 = 1;",
5013 "test2.rs": "",
5014 "Cargo.toml": "a = 1",
5015 "package.json": "{\"a\": 1}",
5016 }),
5017 )
5018 .await;
5019
5020 let project = Project::test(fs.clone(), cx);
5021 project.update(cx, |project, _| {
5022 project.languages.add(Arc::new(rust_language));
5023 project.languages.add(Arc::new(json_language));
5024 });
5025
5026 let worktree_id = project
5027 .update(cx, |project, cx| {
5028 project.find_or_create_local_worktree("/the-root", true, cx)
5029 })
5030 .await
5031 .unwrap()
5032 .0
5033 .read_with(cx, |tree, _| tree.id());
5034
5035 // Open a buffer without an associated language server.
5036 let toml_buffer = project
5037 .update(cx, |project, cx| {
5038 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5039 })
5040 .await
5041 .unwrap();
5042
5043 // Open a buffer with an associated language server.
5044 let rust_buffer = project
5045 .update(cx, |project, cx| {
5046 project.open_buffer((worktree_id, "test.rs"), cx)
5047 })
5048 .await
5049 .unwrap();
5050
5051 // A server is started up, and it is notified about Rust files.
5052 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5053 assert_eq!(
5054 fake_rust_server
5055 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5056 .await
5057 .text_document,
5058 lsp::TextDocumentItem {
5059 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5060 version: 0,
5061 text: "const A: i32 = 1;".to_string(),
5062 language_id: Default::default()
5063 }
5064 );
5065
5066 // The buffer is configured based on the language server's capabilities.
5067 rust_buffer.read_with(cx, |buffer, _| {
5068 assert_eq!(
5069 buffer.completion_triggers(),
5070 &[".".to_string(), "::".to_string()]
5071 );
5072 });
5073 toml_buffer.read_with(cx, |buffer, _| {
5074 assert!(buffer.completion_triggers().is_empty());
5075 });
5076
5077 // Edit a buffer. The changes are reported to the language server.
5078 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5079 assert_eq!(
5080 fake_rust_server
5081 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5082 .await
5083 .text_document,
5084 lsp::VersionedTextDocumentIdentifier::new(
5085 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5086 1
5087 )
5088 );
5089
5090 // Open a third buffer with a different associated language server.
5091 let json_buffer = project
5092 .update(cx, |project, cx| {
5093 project.open_buffer((worktree_id, "package.json"), cx)
5094 })
5095 .await
5096 .unwrap();
5097
        // A JSON language server is started up, and it is only notified about the JSON buffer.
5099 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5100 assert_eq!(
5101 fake_json_server
5102 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5103 .await
5104 .text_document,
5105 lsp::TextDocumentItem {
5106 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5107 version: 0,
5108 text: "{\"a\": 1}".to_string(),
5109 language_id: Default::default()
5110 }
5111 );
5112
5113 // This buffer is configured based on the second language server's
5114 // capabilities.
5115 json_buffer.read_with(cx, |buffer, _| {
5116 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5117 });
5118
5119 // When opening another buffer whose language server is already running,
5120 // it is also configured based on the existing language server's capabilities.
5121 let rust_buffer2 = project
5122 .update(cx, |project, cx| {
5123 project.open_buffer((worktree_id, "test2.rs"), cx)
5124 })
5125 .await
5126 .unwrap();
5127 rust_buffer2.read_with(cx, |buffer, _| {
5128 assert_eq!(
5129 buffer.completion_triggers(),
5130 &[".".to_string(), "::".to_string()]
5131 );
5132 });
5133
5134 // Changes are reported only to servers matching the buffer's language.
5135 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5136 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5137 assert_eq!(
5138 fake_rust_server
5139 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5140 .await
5141 .text_document,
5142 lsp::VersionedTextDocumentIdentifier::new(
5143 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5144 1
5145 )
5146 );
5147
5148 // Save notifications are reported to all servers.
5149 toml_buffer
5150 .update(cx, |buffer, cx| buffer.save(cx))
5151 .await
5152 .unwrap();
5153 assert_eq!(
5154 fake_rust_server
5155 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5156 .await
5157 .text_document,
5158 lsp::TextDocumentIdentifier::new(
5159 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5160 )
5161 );
5162 assert_eq!(
5163 fake_json_server
5164 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5165 .await
5166 .text_document,
5167 lsp::TextDocumentIdentifier::new(
5168 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5169 )
5170 );
5171
5172 // Renames are reported only to servers matching the buffer's language.
5173 fs.rename(
5174 Path::new("/the-root/test2.rs"),
5175 Path::new("/the-root/test3.rs"),
5176 Default::default(),
5177 )
5178 .await
5179 .unwrap();
5180 assert_eq!(
5181 fake_rust_server
5182 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5183 .await
5184 .text_document,
5185 lsp::TextDocumentIdentifier::new(
5186 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5187 ),
5188 );
5189 assert_eq!(
5190 fake_rust_server
5191 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5192 .await
5193 .text_document,
5194 lsp::TextDocumentItem {
5195 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5196 version: 0,
5197 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5198 language_id: Default::default()
5199 },
5200 );
5201
5202 rust_buffer2.update(cx, |buffer, cx| {
5203 buffer.update_diagnostics(
5204 DiagnosticSet::from_sorted_entries(
5205 vec![DiagnosticEntry {
5206 diagnostic: Default::default(),
5207 range: Anchor::MIN..Anchor::MAX,
5208 }],
5209 &buffer.snapshot(),
5210 ),
5211 cx,
5212 );
5213 assert_eq!(
5214 buffer
5215 .snapshot()
5216 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5217 .count(),
5218 1
5219 );
5220 });
5221
5222 // When the rename changes the extension of the file, the buffer gets closed on the old
5223 // language server and gets opened on the new one.
5224 fs.rename(
5225 Path::new("/the-root/test3.rs"),
5226 Path::new("/the-root/test3.json"),
5227 Default::default(),
5228 )
5229 .await
5230 .unwrap();
5231 assert_eq!(
5232 fake_rust_server
5233 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5234 .await
5235 .text_document,
5236 lsp::TextDocumentIdentifier::new(
5237 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5238 ),
5239 );
5240 assert_eq!(
5241 fake_json_server
5242 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5243 .await
5244 .text_document,
5245 lsp::TextDocumentItem {
5246 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5247 version: 0,
5248 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5249 language_id: Default::default()
5250 },
5251 );
5252 // We clear the diagnostics, since the language has changed.
5253 rust_buffer2.read_with(cx, |buffer, _| {
5254 assert_eq!(
5255 buffer
5256 .snapshot()
5257 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5258 .count(),
5259 0
5260 );
5261 });
5262
5263 // The renamed file's version resets after changing language server.
5264 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5265 assert_eq!(
5266 fake_json_server
5267 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5268 .await
5269 .text_document,
5270 lsp::VersionedTextDocumentIdentifier::new(
5271 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5272 1
5273 )
5274 );
5275
5276 // Restart language servers
5277 project.update(cx, |project, cx| {
5278 project.restart_language_servers_for_buffers(
5279 vec![rust_buffer.clone(), json_buffer.clone()],
5280 cx,
5281 );
5282 });
5283
5284 let mut rust_shutdown_requests = fake_rust_server
5285 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5286 let mut json_shutdown_requests = fake_json_server
5287 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5288 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5289
5290 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5291 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5292
        // Ensure the Rust document is reopened in the new Rust language server.
5294 assert_eq!(
5295 fake_rust_server
5296 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5297 .await
5298 .text_document,
5299 lsp::TextDocumentItem {
5300 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5301 version: 1,
5302 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5303 language_id: Default::default()
5304 }
5305 );
5306
        // Ensure the JSON documents are reopened in the new JSON language server.
5308 assert_set_eq!(
5309 [
5310 fake_json_server
5311 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5312 .await
5313 .text_document,
5314 fake_json_server
5315 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5316 .await
5317 .text_document,
5318 ],
5319 [
5320 lsp::TextDocumentItem {
5321 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5322 version: 0,
5323 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5324 language_id: Default::default()
5325 },
5326 lsp::TextDocumentItem {
5327 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5328 version: 1,
5329 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5330 language_id: Default::default()
5331 }
5332 ]
5333 );
5334
5335 // Close notifications are reported only to servers matching the buffer's language.
5336 cx.update(|_| drop(json_buffer));
5337 let close_message = lsp::DidCloseTextDocumentParams {
5338 text_document: lsp::TextDocumentIdentifier::new(
5339 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5340 ),
5341 };
5342 assert_eq!(
5343 fake_json_server
5344 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5345 .await,
5346 close_message,
5347 );
5348 }
5349
5350 #[gpui::test]
5351 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5352 cx.foreground().forbid_parking();
5353
5354 let progress_token = "the-progress-token";
5355 let mut language = Language::new(
5356 LanguageConfig {
5357 name: "Rust".into(),
5358 path_suffixes: vec!["rs".to_string()],
5359 ..Default::default()
5360 },
5361 Some(tree_sitter_rust::language()),
5362 );
5363 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5364 disk_based_diagnostics_progress_token: Some(progress_token),
5365 disk_based_diagnostics_sources: &["disk"],
5366 ..Default::default()
5367 });
5368
5369 let fs = FakeFs::new(cx.background());
5370 fs.insert_tree(
5371 "/dir",
5372 json!({
5373 "a.rs": "fn a() { A }",
5374 "b.rs": "const y: i32 = 1",
5375 }),
5376 )
5377 .await;
5378
5379 let project = Project::test(fs, cx);
5380 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5381
5382 let (tree, _) = project
5383 .update(cx, |project, cx| {
5384 project.find_or_create_local_worktree("/dir", true, cx)
5385 })
5386 .await
5387 .unwrap();
5388 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5389
5390 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5391 .await;
5392
        // Cause the worktree to start the fake language server.
5394 let _buffer = project
5395 .update(cx, |project, cx| {
5396 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5397 })
5398 .await
5399 .unwrap();
5400
5401 let mut events = subscribe(&project, cx);
5402
5403 let mut fake_server = fake_servers.next().await.unwrap();
5404 fake_server.start_progress(progress_token).await;
5405 assert_eq!(
5406 events.next().await.unwrap(),
5407 Event::DiskBasedDiagnosticsStarted
5408 );
5409
5410 fake_server.start_progress(progress_token).await;
5411 fake_server.end_progress(progress_token).await;
5412 fake_server.start_progress(progress_token).await;
5413
5414 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5415 lsp::PublishDiagnosticsParams {
5416 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5417 version: None,
5418 diagnostics: vec![lsp::Diagnostic {
5419 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5420 severity: Some(lsp::DiagnosticSeverity::ERROR),
5421 message: "undefined variable 'A'".to_string(),
5422 ..Default::default()
5423 }],
5424 },
5425 );
5426 assert_eq!(
5427 events.next().await.unwrap(),
5428 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5429 );
5430
5431 fake_server.end_progress(progress_token).await;
5432 fake_server.end_progress(progress_token).await;
5433 assert_eq!(
5434 events.next().await.unwrap(),
5435 Event::DiskBasedDiagnosticsUpdated
5436 );
5437 assert_eq!(
5438 events.next().await.unwrap(),
5439 Event::DiskBasedDiagnosticsFinished
5440 );
5441
5442 let buffer = project
5443 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5444 .await
5445 .unwrap();
5446
5447 buffer.read_with(cx, |buffer, _| {
5448 let snapshot = buffer.snapshot();
5449 let diagnostics = snapshot
5450 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5451 .collect::<Vec<_>>();
5452 assert_eq!(
5453 diagnostics,
5454 &[DiagnosticEntry {
5455 range: Point::new(0, 9)..Point::new(0, 10),
5456 diagnostic: Diagnostic {
5457 severity: lsp::DiagnosticSeverity::ERROR,
5458 message: "undefined variable 'A'".to_string(),
5459 group_id: 0,
5460 is_primary: true,
5461 ..Default::default()
5462 }
5463 }]
5464 )
5465 });
5466 }
5467
5468 #[gpui::test]
5469 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5470 cx.foreground().forbid_parking();
5471
5472 let progress_token = "the-progress-token";
5473 let mut language = Language::new(
5474 LanguageConfig {
5475 path_suffixes: vec!["rs".to_string()],
5476 ..Default::default()
5477 },
5478 None,
5479 );
5480 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5481 disk_based_diagnostics_sources: &["disk"],
5482 disk_based_diagnostics_progress_token: Some(progress_token),
5483 ..Default::default()
5484 });
5485
5486 let fs = FakeFs::new(cx.background());
5487 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5488
5489 let project = Project::test(fs, cx);
5490 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5491
5492 let worktree_id = project
5493 .update(cx, |project, cx| {
5494 project.find_or_create_local_worktree("/dir", true, cx)
5495 })
5496 .await
5497 .unwrap()
5498 .0
5499 .read_with(cx, |tree, _| tree.id());
5500
5501 let buffer = project
5502 .update(cx, |project, cx| {
5503 project.open_buffer((worktree_id, "a.rs"), cx)
5504 })
5505 .await
5506 .unwrap();
5507
5508 // Simulate diagnostics starting to update.
5509 let mut fake_server = fake_servers.next().await.unwrap();
5510 fake_server.start_progress(progress_token).await;
5511
5512 // Restart the server before the diagnostics finish updating.
5513 project.update(cx, |project, cx| {
5514 project.restart_language_servers_for_buffers([buffer], cx);
5515 });
5516 let mut events = subscribe(&project, cx);
5517
5518 // Simulate the newly started server sending more diagnostics.
5519 let mut fake_server = fake_servers.next().await.unwrap();
5520 fake_server.start_progress(progress_token).await;
5521 assert_eq!(
5522 events.next().await.unwrap(),
5523 Event::DiskBasedDiagnosticsStarted
5524 );
5525
5526 // All diagnostics are considered done, despite the old server's diagnostic
5527 // task never completing.
5528 fake_server.end_progress(progress_token).await;
5529 assert_eq!(
5530 events.next().await.unwrap(),
5531 Event::DiskBasedDiagnosticsUpdated
5532 );
5533 assert_eq!(
5534 events.next().await.unwrap(),
5535 Event::DiskBasedDiagnosticsFinished
5536 );
5537 project.read_with(cx, |project, _| {
5538 assert!(!project.is_running_disk_based_diagnostics());
5539 });
5540 }
5541
5542 #[gpui::test]
5543 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5544 cx.foreground().forbid_parking();
5545
5546 let mut language = Language::new(
5547 LanguageConfig {
5548 name: "Rust".into(),
5549 path_suffixes: vec!["rs".to_string()],
5550 ..Default::default()
5551 },
5552 Some(tree_sitter_rust::language()),
5553 );
5554 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5555 disk_based_diagnostics_sources: &["disk"],
5556 ..Default::default()
5557 });
5558
5559 let text = "
5560 fn a() { A }
5561 fn b() { BB }
5562 fn c() { CCC }
5563 "
5564 .unindent();
5565
5566 let fs = FakeFs::new(cx.background());
5567 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5568
5569 let project = Project::test(fs, cx);
5570 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5571
5572 let worktree_id = project
5573 .update(cx, |project, cx| {
5574 project.find_or_create_local_worktree("/dir", true, cx)
5575 })
5576 .await
5577 .unwrap()
5578 .0
5579 .read_with(cx, |tree, _| tree.id());
5580
5581 let buffer = project
5582 .update(cx, |project, cx| {
5583 project.open_buffer((worktree_id, "a.rs"), cx)
5584 })
5585 .await
5586 .unwrap();
5587
5588 let mut fake_server = fake_servers.next().await.unwrap();
5589 let open_notification = fake_server
5590 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5591 .await;
5592
5593 // Edit the buffer, moving the content down
5594 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5595 let change_notification_1 = fake_server
5596 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5597 .await;
5598 assert!(
5599 change_notification_1.text_document.version > open_notification.text_document.version
5600 );
5601
5602 // Report some diagnostics for the initial version of the buffer
5603 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5604 lsp::PublishDiagnosticsParams {
5605 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5606 version: Some(open_notification.text_document.version),
5607 diagnostics: vec![
5608 lsp::Diagnostic {
5609 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5610 severity: Some(DiagnosticSeverity::ERROR),
5611 message: "undefined variable 'A'".to_string(),
5612 source: Some("disk".to_string()),
5613 ..Default::default()
5614 },
5615 lsp::Diagnostic {
5616 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5617 severity: Some(DiagnosticSeverity::ERROR),
5618 message: "undefined variable 'BB'".to_string(),
5619 source: Some("disk".to_string()),
5620 ..Default::default()
5621 },
5622 lsp::Diagnostic {
5623 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5624 severity: Some(DiagnosticSeverity::ERROR),
5625 source: Some("disk".to_string()),
5626 message: "undefined variable 'CCC'".to_string(),
5627 ..Default::default()
5628 },
5629 ],
5630 },
5631 );
5632
5633 // The diagnostics have moved down since they were created.
5634 buffer.next_notification(cx).await;
5635 buffer.read_with(cx, |buffer, _| {
5636 assert_eq!(
5637 buffer
5638 .snapshot()
5639 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5640 .collect::<Vec<_>>(),
5641 &[
5642 DiagnosticEntry {
5643 range: Point::new(3, 9)..Point::new(3, 11),
5644 diagnostic: Diagnostic {
5645 severity: DiagnosticSeverity::ERROR,
5646 message: "undefined variable 'BB'".to_string(),
5647 is_disk_based: true,
5648 group_id: 1,
5649 is_primary: true,
5650 ..Default::default()
5651 },
5652 },
5653 DiagnosticEntry {
5654 range: Point::new(4, 9)..Point::new(4, 12),
5655 diagnostic: Diagnostic {
5656 severity: DiagnosticSeverity::ERROR,
5657 message: "undefined variable 'CCC'".to_string(),
5658 is_disk_based: true,
5659 group_id: 2,
5660 is_primary: true,
5661 ..Default::default()
5662 }
5663 }
5664 ]
5665 );
5666 assert_eq!(
5667 chunks_with_diagnostics(buffer, 0..buffer.len()),
5668 [
5669 ("\n\nfn a() { ".to_string(), None),
5670 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5671 (" }\nfn b() { ".to_string(), None),
5672 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5673 (" }\nfn c() { ".to_string(), None),
5674 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5675 (" }\n".to_string(), None),
5676 ]
5677 );
5678 assert_eq!(
5679 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5680 [
5681 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5682 (" }\nfn c() { ".to_string(), None),
5683 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5684 ]
5685 );
5686 });
5687
5688 // Ensure overlapping diagnostics are highlighted correctly.
5689 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5690 lsp::PublishDiagnosticsParams {
5691 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5692 version: Some(open_notification.text_document.version),
5693 diagnostics: vec![
5694 lsp::Diagnostic {
5695 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5696 severity: Some(DiagnosticSeverity::ERROR),
5697 message: "undefined variable 'A'".to_string(),
5698 source: Some("disk".to_string()),
5699 ..Default::default()
5700 },
5701 lsp::Diagnostic {
5702 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5703 severity: Some(DiagnosticSeverity::WARNING),
5704 message: "unreachable statement".to_string(),
5705 source: Some("disk".to_string()),
5706 ..Default::default()
5707 },
5708 ],
5709 },
5710 );
5711
5712 buffer.next_notification(cx).await;
5713 buffer.read_with(cx, |buffer, _| {
5714 assert_eq!(
5715 buffer
5716 .snapshot()
5717 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5718 .collect::<Vec<_>>(),
5719 &[
5720 DiagnosticEntry {
5721 range: Point::new(2, 9)..Point::new(2, 12),
5722 diagnostic: Diagnostic {
5723 severity: DiagnosticSeverity::WARNING,
5724 message: "unreachable statement".to_string(),
5725 is_disk_based: true,
5726 group_id: 1,
5727 is_primary: true,
5728 ..Default::default()
5729 }
5730 },
5731 DiagnosticEntry {
5732 range: Point::new(2, 9)..Point::new(2, 10),
5733 diagnostic: Diagnostic {
5734 severity: DiagnosticSeverity::ERROR,
5735 message: "undefined variable 'A'".to_string(),
5736 is_disk_based: true,
5737 group_id: 0,
5738 is_primary: true,
5739 ..Default::default()
5740 },
5741 }
5742 ]
5743 );
5744 assert_eq!(
5745 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5746 [
5747 ("fn a() { ".to_string(), None),
5748 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5749 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5750 ("\n".to_string(), None),
5751 ]
5752 );
5753 assert_eq!(
5754 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5755 [
5756 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5757 ("\n".to_string(), None),
5758 ]
5759 );
5760 });
5761
5762 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5763 // changes since the last save.
5764 buffer.update(cx, |buffer, cx| {
5765 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5766 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5767 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5768 });
5769 let change_notification_2 = fake_server
5770 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5771 .await;
5772 assert!(
5773 change_notification_2.text_document.version
5774 > change_notification_1.text_document.version
5775 );
5776
5777         // Handle diagnostics that are reported out of positional order.
5778 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5779 lsp::PublishDiagnosticsParams {
5780 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5781 version: Some(change_notification_2.text_document.version),
5782 diagnostics: vec![
5783 lsp::Diagnostic {
5784 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5785 severity: Some(DiagnosticSeverity::ERROR),
5786 message: "undefined variable 'BB'".to_string(),
5787 source: Some("disk".to_string()),
5788 ..Default::default()
5789 },
5790 lsp::Diagnostic {
5791 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5792 severity: Some(DiagnosticSeverity::WARNING),
5793 message: "undefined variable 'A'".to_string(),
5794 source: Some("disk".to_string()),
5795 ..Default::default()
5796 },
5797 ],
5798 },
5799 );
5800
5801 buffer.next_notification(cx).await;
5802 buffer.read_with(cx, |buffer, _| {
5803 assert_eq!(
5804 buffer
5805 .snapshot()
5806 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5807 .collect::<Vec<_>>(),
5808 &[
5809 DiagnosticEntry {
5810 range: Point::new(2, 21)..Point::new(2, 22),
5811 diagnostic: Diagnostic {
5812 severity: DiagnosticSeverity::WARNING,
5813 message: "undefined variable 'A'".to_string(),
5814 is_disk_based: true,
5815 group_id: 1,
5816 is_primary: true,
5817 ..Default::default()
5818 }
5819 },
5820 DiagnosticEntry {
5821 range: Point::new(3, 9)..Point::new(3, 14),
5822 diagnostic: Diagnostic {
5823 severity: DiagnosticSeverity::ERROR,
5824 message: "undefined variable 'BB'".to_string(),
5825 is_disk_based: true,
5826 group_id: 0,
5827 is_primary: true,
5828 ..Default::default()
5829 },
5830 }
5831 ]
5832 );
5833 });
5834 }
5835
5836 #[gpui::test]
5837 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5838 cx.foreground().forbid_parking();
5839
5840 let text = concat!(
5841 "let one = ;\n", //
5842 "let two = \n",
5843 "let three = 3;\n",
5844 );
5845
5846 let fs = FakeFs::new(cx.background());
5847 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5848
5849 let project = Project::test(fs, cx);
5850 let worktree_id = project
5851 .update(cx, |project, cx| {
5852 project.find_or_create_local_worktree("/dir", true, cx)
5853 })
5854 .await
5855 .unwrap()
5856 .0
5857 .read_with(cx, |tree, _| tree.id());
5858
5859 let buffer = project
5860 .update(cx, |project, cx| {
5861 project.open_buffer((worktree_id, "a.rs"), cx)
5862 })
5863 .await
5864 .unwrap();
5865
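        // Report two empty-range diagnostics directly to the buffer: one in the middle of a line,
        // and one at the end of a line.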
5866 project.update(cx, |project, cx| {
5867 project
5868 .update_buffer_diagnostics(
5869 &buffer,
5870 vec![
5871 DiagnosticEntry {
5872 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5873 diagnostic: Diagnostic {
5874 severity: DiagnosticSeverity::ERROR,
5875 message: "syntax error 1".to_string(),
5876 ..Default::default()
5877 },
5878 },
5879 DiagnosticEntry {
5880 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5881 diagnostic: Diagnostic {
5882 severity: DiagnosticSeverity::ERROR,
5883 message: "syntax error 2".to_string(),
5884 ..Default::default()
5885 },
5886 },
5887 ],
5888 None,
5889 cx,
5890 )
5891 .unwrap();
5892 });
5893
5894 // An empty range is extended forward to include the following character.
5895 // At the end of a line, an empty range is extended backward to include
5896 // the preceding character.
5897 buffer.read_with(cx, |buffer, _| {
5898 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5899 assert_eq!(
5900 chunks
5901 .iter()
5902 .map(|(s, d)| (s.as_str(), *d))
5903 .collect::<Vec<_>>(),
5904 &[
5905 ("let one = ", None),
5906 (";", Some(DiagnosticSeverity::ERROR)),
5907 ("\nlet two =", None),
5908 (" ", Some(DiagnosticSeverity::ERROR)),
5909 ("\nlet three = 3;\n", None)
5910 ]
5911 );
5912 });
5913 }
5914
5915 #[gpui::test]
5916 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5917 cx.foreground().forbid_parking();
5918
5919 let mut language = Language::new(
5920 LanguageConfig {
5921 name: "Rust".into(),
5922 path_suffixes: vec!["rs".to_string()],
5923 ..Default::default()
5924 },
5925 Some(tree_sitter_rust::language()),
5926 );
5927 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5928
5929 let text = "
5930 fn a() {
5931 f1();
5932 }
5933 fn b() {
5934 f2();
5935 }
5936 fn c() {
5937 f3();
5938 }
5939 "
5940 .unindent();
5941
5942 let fs = FakeFs::new(cx.background());
5943 fs.insert_tree(
5944 "/dir",
5945 json!({
5946 "a.rs": text.clone(),
5947 }),
5948 )
5949 .await;
5950
5951 let project = Project::test(fs, cx);
5952 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5953
5954 let worktree_id = project
5955 .update(cx, |project, cx| {
5956 project.find_or_create_local_worktree("/dir", true, cx)
5957 })
5958 .await
5959 .unwrap()
5960 .0
5961 .read_with(cx, |tree, _| tree.id());
5962
5963 let buffer = project
5964 .update(cx, |project, cx| {
5965 project.open_buffer((worktree_id, "a.rs"), cx)
5966 })
5967 .await
5968 .unwrap();
5969
5970 let mut fake_server = fake_servers.next().await.unwrap();
5971 let lsp_document_version = fake_server
5972 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5973 .await
5974 .text_document
5975 .version;
5976
5977 // Simulate editing the buffer after the language server computes some edits.
5978 buffer.update(cx, |buffer, cx| {
5979 buffer.edit(
5980 [Point::new(0, 0)..Point::new(0, 0)],
5981 "// above first function\n",
5982 cx,
5983 );
5984 buffer.edit(
5985 [Point::new(2, 0)..Point::new(2, 0)],
5986 " // inside first function\n",
5987 cx,
5988 );
5989 buffer.edit(
5990 [Point::new(6, 4)..Point::new(6, 4)],
5991 "// inside second function ",
5992 cx,
5993 );
5994
5995 assert_eq!(
5996 buffer.text(),
5997 "
5998 // above first function
5999 fn a() {
6000 // inside first function
6001 f1();
6002 }
6003 fn b() {
6004 // inside second function f2();
6005 }
6006 fn c() {
6007 f3();
6008 }
6009 "
6010 .unindent()
6011 );
6012 });
6013
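        // Interpret LSP edits that were computed against the older document version the server was
        // given when the buffer was opened, not against the buffer's current contents.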
6014 let edits = project
6015 .update(cx, |project, cx| {
6016 project.edits_from_lsp(
6017 &buffer,
6018 vec![
6019 // replace body of first function
6020 lsp::TextEdit {
6021 range: lsp::Range::new(
6022 lsp::Position::new(0, 0),
6023 lsp::Position::new(3, 0),
6024 ),
6025 new_text: "
6026 fn a() {
6027 f10();
6028 }
6029 "
6030 .unindent(),
6031 },
6032 // edit inside second function
6033 lsp::TextEdit {
6034 range: lsp::Range::new(
6035 lsp::Position::new(4, 6),
6036 lsp::Position::new(4, 6),
6037 ),
6038 new_text: "00".into(),
6039 },
6040 // edit inside third function via two distinct edits
6041 lsp::TextEdit {
6042 range: lsp::Range::new(
6043 lsp::Position::new(7, 5),
6044 lsp::Position::new(7, 5),
6045 ),
6046 new_text: "4000".into(),
6047 },
6048 lsp::TextEdit {
6049 range: lsp::Range::new(
6050 lsp::Position::new(7, 5),
6051 lsp::Position::new(7, 6),
6052 ),
6053 new_text: "".into(),
6054 },
6055 ],
6056 Some(lsp_document_version),
6057 cx,
6058 )
6059 })
6060 .await
6061 .unwrap();
6062
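        // Apply the translated edits; they land in the right places despite the buffer edits made
        // after that document version.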
6063 buffer.update(cx, |buffer, cx| {
6064 for (range, new_text) in edits {
6065 buffer.edit([range], new_text, cx);
6066 }
6067 assert_eq!(
6068 buffer.text(),
6069 "
6070 // above first function
6071 fn a() {
6072 // inside first function
6073 f10();
6074 }
6075 fn b() {
6076 // inside second function f200();
6077 }
6078 fn c() {
6079 f4000();
6080 }
6081 "
6082 .unindent()
6083 );
6084 });
6085 }
6086
6087 #[gpui::test]
6088 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6089 cx.foreground().forbid_parking();
6090
6091 let text = "
6092 use a::b;
6093 use a::c;
6094
6095 fn f() {
6096 b();
6097 c();
6098 }
6099 "
6100 .unindent();
6101
6102 let fs = FakeFs::new(cx.background());
6103 fs.insert_tree(
6104 "/dir",
6105 json!({
6106 "a.rs": text.clone(),
6107 }),
6108 )
6109 .await;
6110
6111 let project = Project::test(fs, cx);
6112 let worktree_id = project
6113 .update(cx, |project, cx| {
6114 project.find_or_create_local_worktree("/dir", true, cx)
6115 })
6116 .await
6117 .unwrap()
6118 .0
6119 .read_with(cx, |tree, _| tree.id());
6120
6121 let buffer = project
6122 .update(cx, |project, cx| {
6123 project.open_buffer((worktree_id, "a.rs"), cx)
6124 })
6125 .await
6126 .unwrap();
6127
6128 // Simulate the language server sending us a small edit in the form of a very large diff.
6129 // Rust-analyzer does this when performing a merge-imports code action.
6130 let edits = project
6131 .update(cx, |project, cx| {
6132 project.edits_from_lsp(
6133 &buffer,
6134 [
6135 // Replace the first use statement without editing the semicolon.
6136 lsp::TextEdit {
6137 range: lsp::Range::new(
6138 lsp::Position::new(0, 4),
6139 lsp::Position::new(0, 8),
6140 ),
6141 new_text: "a::{b, c}".into(),
6142 },
6143 // Reinsert the remainder of the file between the semicolon and the final
6144 // newline of the file.
6145 lsp::TextEdit {
6146 range: lsp::Range::new(
6147 lsp::Position::new(0, 9),
6148 lsp::Position::new(0, 9),
6149 ),
6150 new_text: "\n\n".into(),
6151 },
6152 lsp::TextEdit {
6153 range: lsp::Range::new(
6154 lsp::Position::new(0, 9),
6155 lsp::Position::new(0, 9),
6156 ),
6157 new_text: "
6158 fn f() {
6159 b();
6160 c();
6161 }"
6162 .unindent(),
6163 },
6164 // Delete everything after the first newline of the file.
6165 lsp::TextEdit {
6166 range: lsp::Range::new(
6167 lsp::Position::new(1, 0),
6168 lsp::Position::new(7, 0),
6169 ),
6170 new_text: "".into(),
6171 },
6172 ],
6173 None,
6174 cx,
6175 )
6176 })
6177 .await
6178 .unwrap();
6179
6180 buffer.update(cx, |buffer, cx| {
6181 let edits = edits
6182 .into_iter()
6183 .map(|(range, text)| {
6184 (
6185 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6186 text,
6187 )
6188 })
6189 .collect::<Vec<_>>();
6190
6191 assert_eq!(
6192 edits,
6193 [
6194 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6195 (Point::new(1, 0)..Point::new(2, 0), "".into())
6196 ]
6197 );
6198
6199 for (range, new_text) in edits {
6200 buffer.edit([range], new_text, cx);
6201 }
6202 assert_eq!(
6203 buffer.text(),
6204 "
6205 use a::{b, c};
6206
6207 fn f() {
6208 b();
6209 c();
6210 }
6211 "
6212 .unindent()
6213 );
6214 });
6215 }
6216
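    // Collects the buffer text in `range` as (text, severity) chunks, merging adjacent chunks
    // that share the same diagnostic severity.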
6217 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6218 buffer: &Buffer,
6219 range: Range<T>,
6220 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6221 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6222 for chunk in buffer.snapshot().chunks(range, true) {
6223 if chunks.last().map_or(false, |prev_chunk| {
6224 prev_chunk.1 == chunk.diagnostic_severity
6225 }) {
6226 chunks.last_mut().unwrap().0.push_str(chunk.text);
6227 } else {
6228 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6229 }
6230 }
6231 chunks
6232 }
6233
6234 #[gpui::test]
6235 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6236 let dir = temp_tree(json!({
6237 "root": {
6238 "dir1": {},
6239 "dir2": {
6240 "dir3": {}
6241 }
6242 }
6243 }));
6244
6245 let project = Project::test(Arc::new(RealFs), cx);
6246 let (tree, _) = project
6247 .update(cx, |project, cx| {
6248 project.find_or_create_local_worktree(&dir.path(), true, cx)
6249 })
6250 .await
6251 .unwrap();
6252
6253 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6254 .await;
6255
6256 let cancel_flag = Default::default();
6257 let results = project
6258 .read_with(cx, |project, cx| {
6259 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6260 })
6261 .await;
6262
6263 assert!(results.is_empty());
6264 }
6265
6266 #[gpui::test]
6267 async fn test_definition(cx: &mut gpui::TestAppContext) {
6268 let mut language = Language::new(
6269 LanguageConfig {
6270 name: "Rust".into(),
6271 path_suffixes: vec!["rs".to_string()],
6272 ..Default::default()
6273 },
6274 Some(tree_sitter_rust::language()),
6275 );
6276 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6277
6278 let fs = FakeFs::new(cx.background());
6279 fs.insert_tree(
6280 "/dir",
6281 json!({
6282 "a.rs": "const fn a() { A }",
6283 "b.rs": "const y: i32 = crate::a()",
6284 }),
6285 )
6286 .await;
6287
6288 let project = Project::test(fs, cx);
6289 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6290
6291 let (tree, _) = project
6292 .update(cx, |project, cx| {
6293 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6294 })
6295 .await
6296 .unwrap();
6297 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6298 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6299 .await;
6300
6301 let buffer = project
6302 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6303 .await
6304 .unwrap();
6305
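        // The language server resolves the definition to a location in a file that is not yet
        // part of the project.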
6306 let fake_server = fake_servers.next().await.unwrap();
6307 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6308 let params = params.text_document_position_params;
6309 assert_eq!(
6310 params.text_document.uri.to_file_path().unwrap(),
6311 Path::new("/dir/b.rs"),
6312 );
6313 assert_eq!(params.position, lsp::Position::new(0, 22));
6314
6315 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6316 lsp::Location::new(
6317 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6318 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6319 ),
6320 )))
6321 });
6322
6323 let mut definitions = project
6324 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6325 .await
6326 .unwrap();
6327
6328 assert_eq!(definitions.len(), 1);
6329 let definition = definitions.pop().unwrap();
6330 cx.update(|cx| {
6331 let target_buffer = definition.buffer.read(cx);
6332 assert_eq!(
6333 target_buffer
6334 .file()
6335 .unwrap()
6336 .as_local()
6337 .unwrap()
6338 .abs_path(cx),
6339 Path::new("/dir/a.rs"),
6340 );
6341 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6342 assert_eq!(
6343 list_worktrees(&project, cx),
6344 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6345 );
6346
6347 drop(definition);
6348 });
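        // Dropping the definition releases the non-visible worktree that was created for the
        // target file.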
6349 cx.read(|cx| {
6350 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6351 });
6352
6353 fn list_worktrees<'a>(
6354 project: &'a ModelHandle<Project>,
6355 cx: &'a AppContext,
6356 ) -> Vec<(&'a Path, bool)> {
6357 project
6358 .read(cx)
6359 .worktrees(cx)
6360 .map(|worktree| {
6361 let worktree = worktree.read(cx);
6362 (
6363 worktree.as_local().unwrap().abs_path().as_ref(),
6364 worktree.is_visible(),
6365 )
6366 })
6367 .collect::<Vec<_>>()
6368 }
6369 }
6370
6371 #[gpui::test(iterations = 10)]
6372 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6373 let mut language = Language::new(
6374 LanguageConfig {
6375 name: "TypeScript".into(),
6376 path_suffixes: vec!["ts".to_string()],
6377 ..Default::default()
6378 },
6379 None,
6380 );
6381 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6382
6383 let fs = FakeFs::new(cx.background());
6384 fs.insert_tree(
6385 "/dir",
6386 json!({
6387 "a.ts": "a",
6388 }),
6389 )
6390 .await;
6391
6392 let project = Project::test(fs, cx);
6393 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6394
6395 let (tree, _) = project
6396 .update(cx, |project, cx| {
6397 project.find_or_create_local_worktree("/dir", true, cx)
6398 })
6399 .await
6400 .unwrap();
6401 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6402 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6403 .await;
6404
6405 let buffer = project
6406 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6407 .await
6408 .unwrap();
6409
6410 let fake_server = fake_language_servers.next().await.unwrap();
6411
6412         // The language server returns code actions that contain commands rather than edits.
6413 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6414 fake_server
6415 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6416 Ok(Some(vec![
6417 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6418 title: "The code action".into(),
6419 command: Some(lsp::Command {
6420 title: "The command".into(),
6421 command: "_the/command".into(),
6422 arguments: Some(vec![json!("the-argument")]),
6423 }),
6424 ..Default::default()
6425 }),
6426 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6427 title: "two".into(),
6428 ..Default::default()
6429 }),
6430 ]))
6431 })
6432 .next()
6433 .await;
6434
6435 let action = actions.await.unwrap()[0].clone();
6436 let apply = project.update(cx, |project, cx| {
6437 project.apply_code_action(buffer.clone(), action, true, cx)
6438 });
6439
6440         // Resolving the code action does not populate its edits. In the absence of
6441 // edits, we must execute the given command.
6442 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6443 |action, _| async move { Ok(action) },
6444 );
6445
6446 // While executing the command, the language server sends the editor
6447         // a `workspace/applyEdit` request.
6448 fake_server
6449 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6450 let fake = fake_server.clone();
6451 move |params, _| {
6452 assert_eq!(params.command, "_the/command");
6453 let fake = fake.clone();
6454 async move {
6455 fake.server
6456 .request::<lsp::request::ApplyWorkspaceEdit>(
6457 lsp::ApplyWorkspaceEditParams {
6458 label: None,
6459 edit: lsp::WorkspaceEdit {
6460 changes: Some(
6461 [(
6462 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6463 vec![lsp::TextEdit {
6464 range: lsp::Range::new(
6465 lsp::Position::new(0, 0),
6466 lsp::Position::new(0, 0),
6467 ),
6468 new_text: "X".into(),
6469 }],
6470 )]
6471 .into_iter()
6472 .collect(),
6473 ),
6474 ..Default::default()
6475 },
6476 },
6477 )
6478 .await
6479 .unwrap();
6480 Ok(Some(json!(null)))
6481 }
6482 }
6483 })
6484 .next()
6485 .await;
6486
6487 // Applying the code action returns a project transaction containing the edits
6488         // sent by the language server in its `workspace/applyEdit` request.
6489 let transaction = apply.await.unwrap();
6490 assert!(transaction.0.contains_key(&buffer));
6491 buffer.update(cx, |buffer, cx| {
6492 assert_eq!(buffer.text(), "Xa");
6493 buffer.undo(cx);
6494 assert_eq!(buffer.text(), "a");
6495 });
6496 }
6497
6498 #[gpui::test]
6499 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6500 let fs = FakeFs::new(cx.background());
6501 fs.insert_tree(
6502 "/dir",
6503 json!({
6504 "file1": "the old contents",
6505 }),
6506 )
6507 .await;
6508
6509 let project = Project::test(fs.clone(), cx);
6510 let worktree_id = project
6511 .update(cx, |p, cx| {
6512 p.find_or_create_local_worktree("/dir", true, cx)
6513 })
6514 .await
6515 .unwrap()
6516 .0
6517 .read_with(cx, |tree, _| tree.id());
6518
6519 let buffer = project
6520 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6521 .await
6522 .unwrap();
6523 buffer
6524 .update(cx, |buffer, cx| {
6525 assert_eq!(buffer.text(), "the old contents");
6526 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6527 buffer.save(cx)
6528 })
6529 .await
6530 .unwrap();
6531
6532 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6533 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6534 }
6535
6536 #[gpui::test]
6537 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6538 let fs = FakeFs::new(cx.background());
6539 fs.insert_tree(
6540 "/dir",
6541 json!({
6542 "file1": "the old contents",
6543 }),
6544 )
6545 .await;
6546
6547 let project = Project::test(fs.clone(), cx);
6548 let worktree_id = project
6549 .update(cx, |p, cx| {
6550 p.find_or_create_local_worktree("/dir/file1", true, cx)
6551 })
6552 .await
6553 .unwrap()
6554 .0
6555 .read_with(cx, |tree, _| tree.id());
6556
6557 let buffer = project
6558 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6559 .await
6560 .unwrap();
6561 buffer
6562 .update(cx, |buffer, cx| {
6563 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6564 buffer.save(cx)
6565 })
6566 .await
6567 .unwrap();
6568
6569 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6570 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6571 }
6572
6573 #[gpui::test]
6574 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6575 let fs = FakeFs::new(cx.background());
6576 fs.insert_tree("/dir", json!({})).await;
6577
6578 let project = Project::test(fs.clone(), cx);
6579 let (worktree, _) = project
6580 .update(cx, |project, cx| {
6581 project.find_or_create_local_worktree("/dir", true, cx)
6582 })
6583 .await
6584 .unwrap();
6585 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6586
6587 let buffer = project.update(cx, |project, cx| {
6588 project.create_buffer("", None, cx).unwrap()
6589 });
6590 buffer.update(cx, |buffer, cx| {
6591 buffer.edit([0..0], "abc", cx);
6592 assert!(buffer.is_dirty());
6593 assert!(!buffer.has_conflict());
6594 });
6595 project
6596 .update(cx, |project, cx| {
6597 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6598 })
6599 .await
6600 .unwrap();
6601 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6602 buffer.read_with(cx, |buffer, cx| {
6603 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6604 assert!(!buffer.is_dirty());
6605 assert!(!buffer.has_conflict());
6606 });
6607
6608 let opened_buffer = project
6609 .update(cx, |project, cx| {
6610 project.open_buffer((worktree_id, "file1"), cx)
6611 })
6612 .await
6613 .unwrap();
6614 assert_eq!(opened_buffer, buffer);
6615 }
6616
6617 #[gpui::test(retries = 5)]
6618 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6619 let dir = temp_tree(json!({
6620 "a": {
6621 "file1": "",
6622 "file2": "",
6623 "file3": "",
6624 },
6625 "b": {
6626 "c": {
6627 "file4": "",
6628 "file5": "",
6629 }
6630 }
6631 }));
6632
6633 let project = Project::test(Arc::new(RealFs), cx);
6634 let rpc = project.read_with(cx, |p, _| p.client.clone());
6635
6636 let (tree, _) = project
6637 .update(cx, |p, cx| {
6638 p.find_or_create_local_worktree(dir.path(), true, cx)
6639 })
6640 .await
6641 .unwrap();
6642 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6643
6644 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6645 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6646 async move { buffer.await.unwrap() }
6647 };
6648 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6649 tree.read_with(cx, |tree, _| {
6650 tree.entry_for_path(path)
6651 .expect(&format!("no entry for path {}", path))
6652 .id
6653 })
6654 };
6655
6656 let buffer2 = buffer_for_path("a/file2", cx).await;
6657 let buffer3 = buffer_for_path("a/file3", cx).await;
6658 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6659 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6660
6661 let file2_id = id_for_path("a/file2", &cx);
6662 let file3_id = id_for_path("a/file3", &cx);
6663 let file4_id = id_for_path("b/c/file4", &cx);
6664
6665 // Wait for the initial scan.
6666 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6667 .await;
6668
6669 // Create a remote copy of this worktree.
6670 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6671 let (remote, load_task) = cx.update(|cx| {
6672 Worktree::remote(
6673 1,
6674 1,
6675 initial_snapshot.to_proto(&Default::default(), true),
6676 rpc.clone(),
6677 cx,
6678 )
6679 });
6680 load_task.await;
6681
6682 cx.read(|cx| {
6683 assert!(!buffer2.read(cx).is_dirty());
6684 assert!(!buffer3.read(cx).is_dirty());
6685 assert!(!buffer4.read(cx).is_dirty());
6686 assert!(!buffer5.read(cx).is_dirty());
6687 });
6688
6689 // Rename and delete files and directories.
6690 tree.flush_fs_events(&cx).await;
6691 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6692 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6693 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6694 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6695 tree.flush_fs_events(&cx).await;
6696
6697 let expected_paths = vec![
6698 "a",
6699 "a/file1",
6700 "a/file2.new",
6701 "b",
6702 "d",
6703 "d/file3",
6704 "d/file4",
6705 ];
6706
6707 cx.read(|app| {
6708 assert_eq!(
6709 tree.read(app)
6710 .paths()
6711 .map(|p| p.to_str().unwrap())
6712 .collect::<Vec<_>>(),
6713 expected_paths
6714 );
6715
6716 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6717 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6718 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6719
6720 assert_eq!(
6721 buffer2.read(app).file().unwrap().path().as_ref(),
6722 Path::new("a/file2.new")
6723 );
6724 assert_eq!(
6725 buffer3.read(app).file().unwrap().path().as_ref(),
6726 Path::new("d/file3")
6727 );
6728 assert_eq!(
6729 buffer4.read(app).file().unwrap().path().as_ref(),
6730 Path::new("d/file4")
6731 );
6732 assert_eq!(
6733 buffer5.read(app).file().unwrap().path().as_ref(),
6734 Path::new("b/c/file5")
6735 );
6736
6737 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6738 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6739 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6740 assert!(buffer5.read(app).file().unwrap().is_deleted());
6741 });
6742
6743 // Update the remote worktree. Check that it becomes consistent with the
6744 // local worktree.
6745 remote.update(cx, |remote, cx| {
6746 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6747 &initial_snapshot,
6748 1,
6749 1,
6750 true,
6751 );
6752 remote
6753 .as_remote_mut()
6754 .unwrap()
6755 .snapshot
6756 .apply_remote_update(update_message)
6757 .unwrap();
6758
6759 assert_eq!(
6760 remote
6761 .paths()
6762 .map(|p| p.to_str().unwrap())
6763 .collect::<Vec<_>>(),
6764 expected_paths
6765 );
6766 });
6767 }
6768
6769 #[gpui::test]
6770 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6771 let fs = FakeFs::new(cx.background());
6772 fs.insert_tree(
6773 "/the-dir",
6774 json!({
6775 "a.txt": "a-contents",
6776 "b.txt": "b-contents",
6777 }),
6778 )
6779 .await;
6780
6781 let project = Project::test(fs.clone(), cx);
6782 let worktree_id = project
6783 .update(cx, |p, cx| {
6784 p.find_or_create_local_worktree("/the-dir", true, cx)
6785 })
6786 .await
6787 .unwrap()
6788 .0
6789 .read_with(cx, |tree, _| tree.id());
6790
6791 // Spawn multiple tasks to open paths, repeating some paths.
6792 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6793 (
6794 p.open_buffer((worktree_id, "a.txt"), cx),
6795 p.open_buffer((worktree_id, "b.txt"), cx),
6796 p.open_buffer((worktree_id, "a.txt"), cx),
6797 )
6798 });
6799
6800 let buffer_a_1 = buffer_a_1.await.unwrap();
6801 let buffer_a_2 = buffer_a_2.await.unwrap();
6802 let buffer_b = buffer_b.await.unwrap();
6803 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6804 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6805
6806 // There is only one buffer per path.
6807 let buffer_a_id = buffer_a_1.id();
6808 assert_eq!(buffer_a_2.id(), buffer_a_id);
6809
6810 // Open the same path again while it is still open.
6811 drop(buffer_a_1);
6812 let buffer_a_3 = project
6813 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6814 .await
6815 .unwrap();
6816
6817 // There's still only one buffer per path.
6818 assert_eq!(buffer_a_3.id(), buffer_a_id);
6819 }
6820
6821 #[gpui::test]
6822 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6823 use std::fs;
6824
6825 let dir = temp_tree(json!({
6826 "file1": "abc",
6827 "file2": "def",
6828 "file3": "ghi",
6829 }));
6830
6831 let project = Project::test(Arc::new(RealFs), cx);
6832 let (worktree, _) = project
6833 .update(cx, |p, cx| {
6834 p.find_or_create_local_worktree(dir.path(), true, cx)
6835 })
6836 .await
6837 .unwrap();
6838 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6839
6840 worktree.flush_fs_events(&cx).await;
6841 worktree
6842 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6843 .await;
6844
6845 let buffer1 = project
6846 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6847 .await
6848 .unwrap();
6849 let events = Rc::new(RefCell::new(Vec::new()));
6850
6851 // initially, the buffer isn't dirty.
6852 buffer1.update(cx, |buffer, cx| {
6853 cx.subscribe(&buffer1, {
6854 let events = events.clone();
6855 move |_, _, event, _| match event {
6856 BufferEvent::Operation(_) => {}
6857 _ => events.borrow_mut().push(event.clone()),
6858 }
6859 })
6860 .detach();
6861
6862 assert!(!buffer.is_dirty());
6863 assert!(events.borrow().is_empty());
6864
6865 buffer.edit(vec![1..2], "", cx);
6866 });
6867
6868 // after the first edit, the buffer is dirty, and emits a dirtied event.
6869 buffer1.update(cx, |buffer, cx| {
6870 assert!(buffer.text() == "ac");
6871 assert!(buffer.is_dirty());
6872 assert_eq!(
6873 *events.borrow(),
6874 &[language::Event::Edited, language::Event::Dirtied]
6875 );
6876 events.borrow_mut().clear();
6877 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6878 });
6879
6880 // after saving, the buffer is not dirty, and emits a saved event.
6881 buffer1.update(cx, |buffer, cx| {
6882 assert!(!buffer.is_dirty());
6883 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6884 events.borrow_mut().clear();
6885
6886 buffer.edit(vec![1..1], "B", cx);
6887 buffer.edit(vec![2..2], "D", cx);
6888 });
6889
6890 // after editing again, the buffer is dirty, and emits another dirty event.
6891 buffer1.update(cx, |buffer, cx| {
6892 assert!(buffer.text() == "aBDc");
6893 assert!(buffer.is_dirty());
6894 assert_eq!(
6895 *events.borrow(),
6896 &[
6897 language::Event::Edited,
6898 language::Event::Dirtied,
6899 language::Event::Edited,
6900 ],
6901 );
6902 events.borrow_mut().clear();
6903
6904 // TODO - currently, after restoring the buffer to its
6905             // previously-saved state, the buffer is still considered dirty.
6906 buffer.edit([1..3], "", cx);
6907 assert!(buffer.text() == "ac");
6908 assert!(buffer.is_dirty());
6909 });
6910
6911 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6912
6913 // When a file is deleted, the buffer is considered dirty.
6914 let events = Rc::new(RefCell::new(Vec::new()));
6915 let buffer2 = project
6916 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6917 .await
6918 .unwrap();
6919 buffer2.update(cx, |_, cx| {
6920 cx.subscribe(&buffer2, {
6921 let events = events.clone();
6922 move |_, _, event, _| events.borrow_mut().push(event.clone())
6923 })
6924 .detach();
6925 });
6926
6927 fs::remove_file(dir.path().join("file2")).unwrap();
6928 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6929 assert_eq!(
6930 *events.borrow(),
6931 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6932 );
6933
6934     // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6935 let events = Rc::new(RefCell::new(Vec::new()));
6936 let buffer3 = project
6937 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6938 .await
6939 .unwrap();
6940 buffer3.update(cx, |_, cx| {
6941 cx.subscribe(&buffer3, {
6942 let events = events.clone();
6943 move |_, _, event, _| events.borrow_mut().push(event.clone())
6944 })
6945 .detach();
6946 });
6947
6948 worktree.flush_fs_events(&cx).await;
6949 buffer3.update(cx, |buffer, cx| {
6950 buffer.edit(Some(0..0), "x", cx);
6951 });
6952 events.borrow_mut().clear();
6953 fs::remove_file(dir.path().join("file3")).unwrap();
6954 buffer3
6955 .condition(&cx, |_, _| !events.borrow().is_empty())
6956 .await;
6957 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6958 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6959 }
6960
6961 #[gpui::test]
6962 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6963 use std::fs;
6964
6965 let initial_contents = "aaa\nbbbbb\nc\n";
6966 let dir = temp_tree(json!({ "the-file": initial_contents }));
6967
6968 let project = Project::test(Arc::new(RealFs), cx);
6969 let (worktree, _) = project
6970 .update(cx, |p, cx| {
6971 p.find_or_create_local_worktree(dir.path(), true, cx)
6972 })
6973 .await
6974 .unwrap();
6975 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6976
6977 worktree
6978 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6979 .await;
6980
6981 let abs_path = dir.path().join("the-file");
6982 let buffer = project
6983 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6984 .await
6985 .unwrap();
6986
6987 // TODO
6988 // Add a cursor on each row.
6989 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6990 // assert!(!buffer.is_dirty());
6991 // buffer.add_selection_set(
6992 // &(0..3)
6993 // .map(|row| Selection {
6994 // id: row as usize,
6995 // start: Point::new(row, 1),
6996 // end: Point::new(row, 1),
6997 // reversed: false,
6998 // goal: SelectionGoal::None,
6999 // })
7000 // .collect::<Vec<_>>(),
7001 // cx,
7002 // )
7003 // });
7004
7005 // Change the file on disk, adding two new lines of text, and removing
7006 // one line.
7007 buffer.read_with(cx, |buffer, _| {
7008 assert!(!buffer.is_dirty());
7009 assert!(!buffer.has_conflict());
7010 });
7011 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7012 fs::write(&abs_path, new_contents).unwrap();
7013
7014 // Because the buffer was not modified, it is reloaded from disk. Its
7015 // contents are edited according to the diff between the old and new
7016 // file contents.
7017 buffer
7018 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7019 .await;
7020
7021 buffer.update(cx, |buffer, _| {
7022 assert_eq!(buffer.text(), new_contents);
7023 assert!(!buffer.is_dirty());
7024 assert!(!buffer.has_conflict());
7025
7026 // TODO
7027 // let cursor_positions = buffer
7028 // .selection_set(selection_set_id)
7029 // .unwrap()
7030 // .selections::<Point>(&*buffer)
7031 // .map(|selection| {
7032 // assert_eq!(selection.start, selection.end);
7033 // selection.start
7034 // })
7035 // .collect::<Vec<_>>();
7036 // assert_eq!(
7037 // cursor_positions,
7038 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7039 // );
7040 });
7041
7042     // Modify the buffer.
7043 buffer.update(cx, |buffer, cx| {
7044 buffer.edit(vec![0..0], " ", cx);
7045 assert!(buffer.is_dirty());
7046 assert!(!buffer.has_conflict());
7047 });
7048
7049 // Change the file on disk again, adding blank lines to the beginning.
7050 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7051
7052 // Because the buffer is modified, it doesn't reload from disk, but is
7053 // marked as having a conflict.
7054 buffer
7055 .condition(&cx, |buffer, _| buffer.has_conflict())
7056 .await;
7057 }
7058
7059 #[gpui::test]
7060 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7061 cx.foreground().forbid_parking();
7062
7063 let fs = FakeFs::new(cx.background());
7064 fs.insert_tree(
7065 "/the-dir",
7066 json!({
7067 "a.rs": "
7068 fn foo(mut v: Vec<usize>) {
7069 for x in &v {
7070 v.push(1);
7071 }
7072 }
7073 "
7074 .unindent(),
7075 }),
7076 )
7077 .await;
7078
7079 let project = Project::test(fs.clone(), cx);
7080 let (worktree, _) = project
7081 .update(cx, |p, cx| {
7082 p.find_or_create_local_worktree("/the-dir", true, cx)
7083 })
7084 .await
7085 .unwrap();
7086 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7087
7088 let buffer = project
7089 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7090 .await
7091 .unwrap();
7092
7093 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7094 let message = lsp::PublishDiagnosticsParams {
7095 uri: buffer_uri.clone(),
7096 diagnostics: vec![
7097 lsp::Diagnostic {
7098 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7099 severity: Some(DiagnosticSeverity::WARNING),
7100 message: "error 1".to_string(),
7101 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7102 location: lsp::Location {
7103 uri: buffer_uri.clone(),
7104 range: lsp::Range::new(
7105 lsp::Position::new(1, 8),
7106 lsp::Position::new(1, 9),
7107 ),
7108 },
7109 message: "error 1 hint 1".to_string(),
7110 }]),
7111 ..Default::default()
7112 },
7113 lsp::Diagnostic {
7114 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7115 severity: Some(DiagnosticSeverity::HINT),
7116 message: "error 1 hint 1".to_string(),
7117 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7118 location: lsp::Location {
7119 uri: buffer_uri.clone(),
7120 range: lsp::Range::new(
7121 lsp::Position::new(1, 8),
7122 lsp::Position::new(1, 9),
7123 ),
7124 },
7125 message: "original diagnostic".to_string(),
7126 }]),
7127 ..Default::default()
7128 },
7129 lsp::Diagnostic {
7130 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7131 severity: Some(DiagnosticSeverity::ERROR),
7132 message: "error 2".to_string(),
7133 related_information: Some(vec![
7134 lsp::DiagnosticRelatedInformation {
7135 location: lsp::Location {
7136 uri: buffer_uri.clone(),
7137 range: lsp::Range::new(
7138 lsp::Position::new(1, 13),
7139 lsp::Position::new(1, 15),
7140 ),
7141 },
7142 message: "error 2 hint 1".to_string(),
7143 },
7144 lsp::DiagnosticRelatedInformation {
7145 location: lsp::Location {
7146 uri: buffer_uri.clone(),
7147 range: lsp::Range::new(
7148 lsp::Position::new(1, 13),
7149 lsp::Position::new(1, 15),
7150 ),
7151 },
7152 message: "error 2 hint 2".to_string(),
7153 },
7154 ]),
7155 ..Default::default()
7156 },
7157 lsp::Diagnostic {
7158 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7159 severity: Some(DiagnosticSeverity::HINT),
7160 message: "error 2 hint 1".to_string(),
7161 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7162 location: lsp::Location {
7163 uri: buffer_uri.clone(),
7164 range: lsp::Range::new(
7165 lsp::Position::new(2, 8),
7166 lsp::Position::new(2, 17),
7167 ),
7168 },
7169 message: "original diagnostic".to_string(),
7170 }]),
7171 ..Default::default()
7172 },
7173 lsp::Diagnostic {
7174 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7175 severity: Some(DiagnosticSeverity::HINT),
7176 message: "error 2 hint 2".to_string(),
7177 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7178 location: lsp::Location {
7179 uri: buffer_uri.clone(),
7180 range: lsp::Range::new(
7181 lsp::Position::new(2, 8),
7182 lsp::Position::new(2, 17),
7183 ),
7184 },
7185 message: "original diagnostic".to_string(),
7186 }]),
7187 ..Default::default()
7188 },
7189 ],
7190 version: None,
7191 };
7192
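        // Feed the diagnostics into the project so that related diagnostics are grouped together.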
7193 project
7194 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7195 .unwrap();
7196 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7197
7198 assert_eq!(
7199 buffer
7200 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7201 .collect::<Vec<_>>(),
7202 &[
7203 DiagnosticEntry {
7204 range: Point::new(1, 8)..Point::new(1, 9),
7205 diagnostic: Diagnostic {
7206 severity: DiagnosticSeverity::WARNING,
7207 message: "error 1".to_string(),
7208 group_id: 0,
7209 is_primary: true,
7210 ..Default::default()
7211 }
7212 },
7213 DiagnosticEntry {
7214 range: Point::new(1, 8)..Point::new(1, 9),
7215 diagnostic: Diagnostic {
7216 severity: DiagnosticSeverity::HINT,
7217 message: "error 1 hint 1".to_string(),
7218 group_id: 0,
7219 is_primary: false,
7220 ..Default::default()
7221 }
7222 },
7223 DiagnosticEntry {
7224 range: Point::new(1, 13)..Point::new(1, 15),
7225 diagnostic: Diagnostic {
7226 severity: DiagnosticSeverity::HINT,
7227 message: "error 2 hint 1".to_string(),
7228 group_id: 1,
7229 is_primary: false,
7230 ..Default::default()
7231 }
7232 },
7233 DiagnosticEntry {
7234 range: Point::new(1, 13)..Point::new(1, 15),
7235 diagnostic: Diagnostic {
7236 severity: DiagnosticSeverity::HINT,
7237 message: "error 2 hint 2".to_string(),
7238 group_id: 1,
7239 is_primary: false,
7240 ..Default::default()
7241 }
7242 },
7243 DiagnosticEntry {
7244 range: Point::new(2, 8)..Point::new(2, 17),
7245 diagnostic: Diagnostic {
7246 severity: DiagnosticSeverity::ERROR,
7247 message: "error 2".to_string(),
7248 group_id: 1,
7249 is_primary: true,
7250 ..Default::default()
7251 }
7252 }
7253 ]
7254 );
7255
7256 assert_eq!(
7257 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7258 &[
7259 DiagnosticEntry {
7260 range: Point::new(1, 8)..Point::new(1, 9),
7261 diagnostic: Diagnostic {
7262 severity: DiagnosticSeverity::WARNING,
7263 message: "error 1".to_string(),
7264 group_id: 0,
7265 is_primary: true,
7266 ..Default::default()
7267 }
7268 },
7269 DiagnosticEntry {
7270 range: Point::new(1, 8)..Point::new(1, 9),
7271 diagnostic: Diagnostic {
7272 severity: DiagnosticSeverity::HINT,
7273 message: "error 1 hint 1".to_string(),
7274 group_id: 0,
7275 is_primary: false,
7276 ..Default::default()
7277 }
7278 },
7279 ]
7280 );
7281 assert_eq!(
7282 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7283 &[
7284 DiagnosticEntry {
7285 range: Point::new(1, 13)..Point::new(1, 15),
7286 diagnostic: Diagnostic {
7287 severity: DiagnosticSeverity::HINT,
7288 message: "error 2 hint 1".to_string(),
7289 group_id: 1,
7290 is_primary: false,
7291 ..Default::default()
7292 }
7293 },
7294 DiagnosticEntry {
7295 range: Point::new(1, 13)..Point::new(1, 15),
7296 diagnostic: Diagnostic {
7297 severity: DiagnosticSeverity::HINT,
7298 message: "error 2 hint 2".to_string(),
7299 group_id: 1,
7300 is_primary: false,
7301 ..Default::default()
7302 }
7303 },
7304 DiagnosticEntry {
7305 range: Point::new(2, 8)..Point::new(2, 17),
7306 diagnostic: Diagnostic {
7307 severity: DiagnosticSeverity::ERROR,
7308 message: "error 2".to_string(),
7309 group_id: 1,
7310 is_primary: true,
7311 ..Default::default()
7312 }
7313 }
7314 ]
7315 );
7316 }
7317
7318 #[gpui::test]
7319 async fn test_rename(cx: &mut gpui::TestAppContext) {
7320 cx.foreground().forbid_parking();
7321
7322 let mut language = Language::new(
7323 LanguageConfig {
7324 name: "Rust".into(),
7325 path_suffixes: vec!["rs".to_string()],
7326 ..Default::default()
7327 },
7328 Some(tree_sitter_rust::language()),
7329 );
7330 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7331
7332 let fs = FakeFs::new(cx.background());
7333 fs.insert_tree(
7334 "/dir",
7335 json!({
7336 "one.rs": "const ONE: usize = 1;",
7337 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7338 }),
7339 )
7340 .await;
7341
7342 let project = Project::test(fs.clone(), cx);
7343 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7344
7345 let (tree, _) = project
7346 .update(cx, |project, cx| {
7347 project.find_or_create_local_worktree("/dir", true, cx)
7348 })
7349 .await
7350 .unwrap();
7351 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7352 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7353 .await;
7354
7355 let buffer = project
7356 .update(cx, |project, cx| {
7357 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7358 })
7359 .await
7360 .unwrap();
7361
7362 let fake_server = fake_servers.next().await.unwrap();
7363
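        // Prepare the rename, resolving the range of the symbol under the cursor.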
7364 let response = project.update(cx, |project, cx| {
7365 project.prepare_rename(buffer.clone(), 7, cx)
7366 });
7367 fake_server
7368 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7369 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7370 assert_eq!(params.position, lsp::Position::new(0, 7));
7371 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7372 lsp::Position::new(0, 6),
7373 lsp::Position::new(0, 9),
7374 ))))
7375 })
7376 .next()
7377 .await
7378 .unwrap();
7379 let range = response.await.unwrap().unwrap();
7380 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7381 assert_eq!(range, 6..9);
7382
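        // Perform the rename. The resulting workspace edit touches both files.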
7383 let response = project.update(cx, |project, cx| {
7384 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7385 });
7386 fake_server
7387 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7388 assert_eq!(
7389 params.text_document_position.text_document.uri.as_str(),
7390 "file:///dir/one.rs"
7391 );
7392 assert_eq!(
7393 params.text_document_position.position,
7394 lsp::Position::new(0, 7)
7395 );
7396 assert_eq!(params.new_name, "THREE");
7397 Ok(Some(lsp::WorkspaceEdit {
7398 changes: Some(
7399 [
7400 (
7401 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7402 vec![lsp::TextEdit::new(
7403 lsp::Range::new(
7404 lsp::Position::new(0, 6),
7405 lsp::Position::new(0, 9),
7406 ),
7407 "THREE".to_string(),
7408 )],
7409 ),
7410 (
7411 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7412 vec![
7413 lsp::TextEdit::new(
7414 lsp::Range::new(
7415 lsp::Position::new(0, 24),
7416 lsp::Position::new(0, 27),
7417 ),
7418 "THREE".to_string(),
7419 ),
7420 lsp::TextEdit::new(
7421 lsp::Range::new(
7422 lsp::Position::new(0, 35),
7423 lsp::Position::new(0, 38),
7424 ),
7425 "THREE".to_string(),
7426 ),
7427 ],
7428 ),
7429 ]
7430 .into_iter()
7431 .collect(),
7432 ),
7433 ..Default::default()
7434 }))
7435 })
7436 .next()
7437 .await
7438 .unwrap();
7439 let mut transaction = response.await.unwrap().0;
7440 assert_eq!(transaction.len(), 2);
7441 assert_eq!(
7442 transaction
7443 .remove_entry(&buffer)
7444 .unwrap()
7445 .0
7446 .read_with(cx, |buffer, _| buffer.text()),
7447 "const THREE: usize = 1;"
7448 );
7449 assert_eq!(
7450 transaction
7451 .into_keys()
7452 .next()
7453 .unwrap()
7454 .read_with(cx, |buffer, _| buffer.text()),
7455 "const TWO: usize = one::THREE + one::THREE;"
7456 );
7457 }
7458
7459 #[gpui::test]
7460 async fn test_search(cx: &mut gpui::TestAppContext) {
7461 let fs = FakeFs::new(cx.background());
7462 fs.insert_tree(
7463 "/dir",
7464 json!({
7465 "one.rs": "const ONE: usize = 1;",
7466 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7467 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7468 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7469 }),
7470 )
7471 .await;
7472 let project = Project::test(fs.clone(), cx);
7473 let (tree, _) = project
7474 .update(cx, |project, cx| {
7475 project.find_or_create_local_worktree("/dir", true, cx)
7476 })
7477 .await
7478 .unwrap();
7479 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7480 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7481 .await;
7482
7483 assert_eq!(
7484 search(&project, SearchQuery::text("TWO", false, true), cx)
7485 .await
7486 .unwrap(),
7487 HashMap::from_iter([
7488 ("two.rs".to_string(), vec![6..9]),
7489 ("three.rs".to_string(), vec![37..40])
7490 ])
7491 );
7492
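        // Edit one of the files in memory so that its buffer no longer matches what's on disk;
        // the search should reflect the unsaved contents.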
7493 let buffer_4 = project
7494 .update(cx, |project, cx| {
7495 project.open_buffer((worktree_id, "four.rs"), cx)
7496 })
7497 .await
7498 .unwrap();
7499 buffer_4.update(cx, |buffer, cx| {
7500 buffer.edit([20..28, 31..43], "two::TWO", cx);
7501 });
7502
7503 assert_eq!(
7504 search(&project, SearchQuery::text("TWO", false, true), cx)
7505 .await
7506 .unwrap(),
7507 HashMap::from_iter([
7508 ("two.rs".to_string(), vec![6..9]),
7509 ("three.rs".to_string(), vec![37..40]),
7510 ("four.rs".to_string(), vec![25..28, 36..39])
7511 ])
7512 );
7513
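    // Runs a project-wide search and flattens the results into a map from file path to
    // matching offset ranges.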
7514 async fn search(
7515 project: &ModelHandle<Project>,
7516 query: SearchQuery,
7517 cx: &mut gpui::TestAppContext,
7518 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7519 let results = project
7520 .update(cx, |project, cx| project.search(query, cx))
7521 .await?;
7522
7523 Ok(results
7524 .into_iter()
7525 .map(|(buffer, ranges)| {
7526 buffer.read_with(cx, |buffer, _| {
7527 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7528 let ranges = ranges
7529 .into_iter()
7530 .map(|range| range.to_offset(buffer))
7531 .collect::<Vec<_>>();
7532 (path, ranges)
7533 })
7534 })
7535 .collect())
7536 }
7537 }
7538}