1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use settings::Settings;
32use sha2::{Digest, Sha256};
33use similar::{ChangeTag, TextDiff};
34use std::{
35 cell::RefCell,
36 cmp::{self, Ordering},
37 convert::TryInto,
38 hash::Hash,
39 mem,
40 ops::Range,
41 path::{Component, Path, PathBuf},
42 rc::Rc,
43 sync::{
44 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
45 Arc,
46 },
47 time::Instant,
48};
49use util::{post_inc, ResultExt, TryFutureExt as _};
50
51pub use fs::*;
52pub use worktree::*;
53
54pub trait Item: Entity {
55 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
56}
57
58pub struct Project {
59 worktrees: Vec<WorktreeHandle>,
60 active_entry: Option<ProjectEntryId>,
61 languages: Arc<LanguageRegistry>,
62 language_servers:
63 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
64 started_language_servers:
65 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
66 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
67 language_server_settings: Arc<Mutex<serde_json::Value>>,
68 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
69 next_language_server_id: usize,
70 client: Arc<client::Client>,
71 next_entry_id: Arc<AtomicUsize>,
72 user_store: ModelHandle<UserStore>,
73 fs: Arc<dyn Fs>,
74 client_state: ProjectClientState,
75 collaborators: HashMap<PeerId, Collaborator>,
76 subscriptions: Vec<client::Subscription>,
77 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
78 shared_buffers: HashMap<PeerId, HashSet<u64>>,
79 loading_buffers: HashMap<
80 ProjectPath,
81 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
82 >,
83 loading_local_worktrees:
84 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
85 opened_buffers: HashMap<u64, OpenBuffer>,
86 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
87 nonce: u128,
88}
89
90enum OpenBuffer {
91 Strong(ModelHandle<Buffer>),
92 Weak(WeakModelHandle<Buffer>),
93 Loading(Vec<Operation>),
94}
95
96enum WorktreeHandle {
97 Strong(ModelHandle<Worktree>),
98 Weak(WeakModelHandle<Worktree>),
99}
100
101enum ProjectClientState {
102 Local {
103 is_shared: bool,
104 remote_id_tx: watch::Sender<Option<u64>>,
105 remote_id_rx: watch::Receiver<Option<u64>>,
106 _maintain_remote_id_task: Task<Option<()>>,
107 },
108 Remote {
109 sharing_has_stopped: bool,
110 remote_id: u64,
111 replica_id: ReplicaId,
112 _detect_unshare_task: Task<Option<()>>,
113 },
114}
115
116#[derive(Clone, Debug)]
117pub struct Collaborator {
118 pub user: Arc<User>,
119 pub peer_id: PeerId,
120 pub replica_id: ReplicaId,
121}
122
123#[derive(Clone, Debug, PartialEq)]
124pub enum Event {
125 ActiveEntryChanged(Option<ProjectEntryId>),
126 WorktreeRemoved(WorktreeId),
127 DiskBasedDiagnosticsStarted,
128 DiskBasedDiagnosticsUpdated,
129 DiskBasedDiagnosticsFinished,
130 DiagnosticsUpdated(ProjectPath),
131 RemoteIdChanged(Option<u64>),
132 CollaboratorLeft(PeerId),
133}
134
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_server_name: LanguageServerName,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
225#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
226pub struct ProjectEntryId(usize);
227
228impl ProjectEntryId {
229 pub fn new(counter: &AtomicUsize) -> Self {
230 Self(counter.fetch_add(1, SeqCst))
231 }
232
233 pub fn from_proto(id: u64) -> Self {
234 Self(id as usize)
235 }
236
237 pub fn to_proto(&self) -> u64 {
238 self.0 as u64
239 }
240
241 pub fn to_usize(&self) -> usize {
242 self.0
243 }
244}
245
246impl Project {
247 pub fn init(client: &Arc<Client>) {
248 client.add_model_message_handler(Self::handle_add_collaborator);
249 client.add_model_message_handler(Self::handle_buffer_reloaded);
250 client.add_model_message_handler(Self::handle_buffer_saved);
251 client.add_model_message_handler(Self::handle_start_language_server);
252 client.add_model_message_handler(Self::handle_update_language_server);
253 client.add_model_message_handler(Self::handle_remove_collaborator);
254 client.add_model_message_handler(Self::handle_register_worktree);
255 client.add_model_message_handler(Self::handle_unregister_worktree);
256 client.add_model_message_handler(Self::handle_unshare_project);
257 client.add_model_message_handler(Self::handle_update_buffer_file);
258 client.add_model_message_handler(Self::handle_update_buffer);
259 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
260 client.add_model_message_handler(Self::handle_update_worktree);
261 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
262 client.add_model_request_handler(Self::handle_apply_code_action);
263 client.add_model_request_handler(Self::handle_reload_buffers);
264 client.add_model_request_handler(Self::handle_format_buffers);
265 client.add_model_request_handler(Self::handle_get_code_actions);
266 client.add_model_request_handler(Self::handle_get_completions);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
272 client.add_model_request_handler(Self::handle_search_project);
273 client.add_model_request_handler(Self::handle_get_project_symbols);
274 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
275 client.add_model_request_handler(Self::handle_open_buffer_by_id);
276 client.add_model_request_handler(Self::handle_open_buffer_by_path);
277 client.add_model_request_handler(Self::handle_save_buffer);
278 }
279
280 pub fn local(
281 client: Arc<Client>,
282 user_store: ModelHandle<UserStore>,
283 languages: Arc<LanguageRegistry>,
284 fs: Arc<dyn Fs>,
285 cx: &mut MutableAppContext,
286 ) -> ModelHandle<Self> {
287 cx.add_model(|cx: &mut ModelContext<Self>| {
288 let (remote_id_tx, remote_id_rx) = watch::channel();
289 let _maintain_remote_id_task = cx.spawn_weak({
290 let rpc = client.clone();
291 move |this, mut cx| {
292 async move {
293 let mut status = rpc.status();
294 while let Some(status) = status.next().await {
295 if let Some(this) = this.upgrade(&cx) {
296 if status.is_connected() {
297 this.update(&mut cx, |this, cx| this.register(cx)).await?;
298 } else {
299 this.update(&mut cx, |this, cx| this.unregister(cx));
300 }
301 }
302 }
303 Ok(())
304 }
305 .log_err()
306 }
307 });
308
309 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
310 Self {
311 worktrees: Default::default(),
312 collaborators: Default::default(),
313 opened_buffers: Default::default(),
314 shared_buffers: Default::default(),
315 loading_buffers: Default::default(),
316 loading_local_worktrees: Default::default(),
317 buffer_snapshots: Default::default(),
318 client_state: ProjectClientState::Local {
319 is_shared: false,
320 remote_id_tx,
321 remote_id_rx,
322 _maintain_remote_id_task,
323 },
324 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
325 subscriptions: Vec::new(),
326 active_entry: None,
327 languages,
328 client,
329 user_store,
330 fs,
331 next_entry_id: Default::default(),
332 language_servers: Default::default(),
333 started_language_servers: Default::default(),
334 language_server_statuses: Default::default(),
335 last_workspace_edits_by_language_server: Default::default(),
336 language_server_settings: Default::default(),
337 next_language_server_id: 0,
338 nonce: StdRng::from_entropy().gen(),
339 }
340 })
341 }
342
343 pub async fn remote(
344 remote_id: u64,
345 client: Arc<Client>,
346 user_store: ModelHandle<UserStore>,
347 languages: Arc<LanguageRegistry>,
348 fs: Arc<dyn Fs>,
349 cx: &mut AsyncAppContext,
350 ) -> Result<ModelHandle<Self>> {
351 client.authenticate_and_connect(true, &cx).await?;
352
353 let response = client
354 .request(proto::JoinProject {
355 project_id: remote_id,
356 })
357 .await?;
358
359 let replica_id = response.replica_id as ReplicaId;
360
361 let mut worktrees = Vec::new();
362 for worktree in response.worktrees {
363 let (worktree, load_task) = cx
364 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
365 worktrees.push(worktree);
366 load_task.detach();
367 }
368
369 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
370 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
371 let mut this = Self {
372 worktrees: Vec::new(),
373 loading_buffers: Default::default(),
374 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
375 shared_buffers: Default::default(),
376 loading_local_worktrees: Default::default(),
377 active_entry: None,
378 collaborators: Default::default(),
379 languages,
380 user_store: user_store.clone(),
381 fs,
382 next_entry_id: Default::default(),
383 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
384 client: client.clone(),
385 client_state: ProjectClientState::Remote {
386 sharing_has_stopped: false,
387 remote_id,
388 replica_id,
389 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
390 async move {
391 let mut status = client.status();
392 let is_connected =
393 status.next().await.map_or(false, |s| s.is_connected());
394 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
395 if !is_connected || status.next().await.is_some() {
396 if let Some(this) = this.upgrade(&cx) {
397 this.update(&mut cx, |this, cx| this.project_unshared(cx))
398 }
399 }
400 Ok(())
401 }
402 .log_err()
403 }),
404 },
405 language_servers: Default::default(),
406 started_language_servers: Default::default(),
407 language_server_settings: Default::default(),
408 language_server_statuses: response
409 .language_servers
410 .into_iter()
411 .map(|server| {
412 (
413 server.id as usize,
414 LanguageServerStatus {
415 name: server.name,
416 pending_work: Default::default(),
417 pending_diagnostic_updates: 0,
418 },
419 )
420 })
421 .collect(),
422 last_workspace_edits_by_language_server: Default::default(),
423 next_language_server_id: 0,
424 opened_buffers: Default::default(),
425 buffer_snapshots: Default::default(),
426 nonce: StdRng::from_entropy().gen(),
427 };
428 for worktree in worktrees {
429 this.add_worktree(&worktree, cx);
430 }
431 this
432 });
433
434 let user_ids = response
435 .collaborators
436 .iter()
437 .map(|peer| peer.user_id)
438 .collect();
439 user_store
440 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
441 .await?;
442 let mut collaborators = HashMap::default();
443 for message in response.collaborators {
444 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
445 collaborators.insert(collaborator.peer_id, collaborator);
446 }
447
448 this.update(cx, |this, _| {
449 this.collaborators = collaborators;
450 });
451
452 Ok(this)
453 }
454
455 #[cfg(any(test, feature = "test-support"))]
456 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
457 let languages = Arc::new(LanguageRegistry::test());
458 let http_client = client::test::FakeHttpClient::with_404_response();
459 let client = client::Client::new(http_client.clone());
460 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
461 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
462 }
463
464 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
465 self.opened_buffers
466 .get(&remote_id)
467 .and_then(|buffer| buffer.upgrade(cx))
468 }
469
470 #[cfg(any(test, feature = "test-support"))]
471 pub fn languages(&self) -> &Arc<LanguageRegistry> {
472 &self.languages
473 }
474
475 #[cfg(any(test, feature = "test-support"))]
476 pub fn check_invariants(&self, cx: &AppContext) {
477 if self.is_local() {
478 let mut worktree_root_paths = HashMap::default();
479 for worktree in self.worktrees(cx) {
480 let worktree = worktree.read(cx);
481 let abs_path = worktree.as_local().unwrap().abs_path().clone();
482 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
483 assert_eq!(
484 prev_worktree_id,
485 None,
486 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
487 abs_path,
488 worktree.id(),
489 prev_worktree_id
490 )
491 }
492 } else {
493 let replica_id = self.replica_id();
494 for buffer in self.opened_buffers.values() {
495 if let Some(buffer) = buffer.upgrade(cx) {
496 let buffer = buffer.read(cx);
497 assert_eq!(
498 buffer.deferred_ops_len(),
499 0,
500 "replica {}, buffer {} has deferred operations",
501 replica_id,
502 buffer.remote_id()
503 );
504 }
505 }
506 }
507 }
508
509 #[cfg(any(test, feature = "test-support"))]
510 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
511 let path = path.into();
512 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
513 self.opened_buffers.iter().any(|(_, buffer)| {
514 if let Some(buffer) = buffer.upgrade(cx) {
515 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
516 if file.worktree == worktree && file.path() == &path.path {
517 return true;
518 }
519 }
520 }
521 false
522 })
523 } else {
524 false
525 }
526 }
527
528 pub fn fs(&self) -> &Arc<dyn Fs> {
529 &self.fs
530 }
531
532 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
533 self.unshare(cx);
534 for worktree in &self.worktrees {
535 if let Some(worktree) = worktree.upgrade(cx) {
536 worktree.update(cx, |worktree, _| {
537 worktree.as_local_mut().unwrap().unregister();
538 });
539 }
540 }
541
542 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
543 *remote_id_tx.borrow_mut() = None;
544 }
545
546 self.subscriptions.clear();
547 }
548
549 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
550 self.unregister(cx);
551
552 let response = self.client.request(proto::RegisterProject {});
553 cx.spawn(|this, mut cx| async move {
554 let remote_id = response.await?.project_id;
555
556 let mut registrations = Vec::new();
557 this.update(&mut cx, |this, cx| {
558 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
559 *remote_id_tx.borrow_mut() = Some(remote_id);
560 }
561
562 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
563
564 this.subscriptions
565 .push(this.client.add_model_for_remote_entity(remote_id, cx));
566
567 for worktree in &this.worktrees {
568 if let Some(worktree) = worktree.upgrade(cx) {
569 registrations.push(worktree.update(cx, |worktree, cx| {
570 let worktree = worktree.as_local_mut().unwrap();
571 worktree.register(remote_id, cx)
572 }));
573 }
574 }
575 });
576
577 futures::future::try_join_all(registrations).await?;
578 Ok(())
579 })
580 }
581
582 pub fn remote_id(&self) -> Option<u64> {
583 match &self.client_state {
584 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
585 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
586 }
587 }
588
589 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
590 let mut id = None;
591 let mut watch = None;
592 match &self.client_state {
593 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
594 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
595 }
596
597 async move {
598 if let Some(id) = id {
599 return id;
600 }
601 let mut watch = watch.unwrap();
602 loop {
603 let id = *watch.borrow();
604 if let Some(id) = id {
605 return id;
606 }
607 watch.next().await;
608 }
609 }
610 }
611
612 pub fn replica_id(&self) -> ReplicaId {
613 match &self.client_state {
614 ProjectClientState::Local { .. } => 0,
615 ProjectClientState::Remote { replica_id, .. } => *replica_id,
616 }
617 }
618
619 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
620 &self.collaborators
621 }
622
623 pub fn worktrees<'a>(
624 &'a self,
625 cx: &'a AppContext,
626 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
627 self.worktrees
628 .iter()
629 .filter_map(move |worktree| worktree.upgrade(cx))
630 }
631
632 pub fn visible_worktrees<'a>(
633 &'a self,
634 cx: &'a AppContext,
635 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
636 self.worktrees.iter().filter_map(|worktree| {
637 worktree.upgrade(cx).and_then(|worktree| {
638 if worktree.read(cx).is_visible() {
639 Some(worktree)
640 } else {
641 None
642 }
643 })
644 })
645 }
646
647 pub fn worktree_for_id(
648 &self,
649 id: WorktreeId,
650 cx: &AppContext,
651 ) -> Option<ModelHandle<Worktree>> {
652 self.worktrees(cx)
653 .find(|worktree| worktree.read(cx).id() == id)
654 }
655
656 pub fn worktree_for_entry(
657 &self,
658 entry_id: ProjectEntryId,
659 cx: &AppContext,
660 ) -> Option<ModelHandle<Worktree>> {
661 self.worktrees(cx)
662 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
663 }
664
665 pub fn worktree_id_for_entry(
666 &self,
667 entry_id: ProjectEntryId,
668 cx: &AppContext,
669 ) -> Option<WorktreeId> {
670 self.worktree_for_entry(entry_id, cx)
671 .map(|worktree| worktree.read(cx).id())
672 }
673
674 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
675 let rpc = self.client.clone();
676 cx.spawn(|this, mut cx| async move {
677 let project_id = this.update(&mut cx, |this, cx| {
678 if let ProjectClientState::Local {
679 is_shared,
680 remote_id_rx,
681 ..
682 } = &mut this.client_state
683 {
684 *is_shared = true;
685
686 for open_buffer in this.opened_buffers.values_mut() {
687 match open_buffer {
688 OpenBuffer::Strong(_) => {}
689 OpenBuffer::Weak(buffer) => {
690 if let Some(buffer) = buffer.upgrade(cx) {
691 *open_buffer = OpenBuffer::Strong(buffer);
692 }
693 }
694 OpenBuffer::Loading(_) => unreachable!(),
695 }
696 }
697
698 for worktree_handle in this.worktrees.iter_mut() {
699 match worktree_handle {
700 WorktreeHandle::Strong(_) => {}
701 WorktreeHandle::Weak(worktree) => {
702 if let Some(worktree) = worktree.upgrade(cx) {
703 *worktree_handle = WorktreeHandle::Strong(worktree);
704 }
705 }
706 }
707 }
708
709 remote_id_rx
710 .borrow()
711 .ok_or_else(|| anyhow!("no project id"))
712 } else {
713 Err(anyhow!("can't share a remote project"))
714 }
715 })?;
716
717 rpc.request(proto::ShareProject { project_id }).await?;
718
719 let mut tasks = Vec::new();
720 this.update(&mut cx, |this, cx| {
721 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
722 worktree.update(cx, |worktree, cx| {
723 let worktree = worktree.as_local_mut().unwrap();
724 tasks.push(worktree.share(project_id, cx));
725 });
726 }
727 });
728 for task in tasks {
729 task.await?;
730 }
731 this.update(&mut cx, |_, cx| cx.notify());
732 Ok(())
733 })
734 }
735
736 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
737 let rpc = self.client.clone();
738
739 if let ProjectClientState::Local {
740 is_shared,
741 remote_id_rx,
742 ..
743 } = &mut self.client_state
744 {
745 if !*is_shared {
746 return;
747 }
748
749 *is_shared = false;
750 self.collaborators.clear();
751 self.shared_buffers.clear();
752 for worktree_handle in self.worktrees.iter_mut() {
753 if let WorktreeHandle::Strong(worktree) = worktree_handle {
754 let is_visible = worktree.update(cx, |worktree, _| {
755 worktree.as_local_mut().unwrap().unshare();
756 worktree.is_visible()
757 });
758 if !is_visible {
759 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
760 }
761 }
762 }
763
764 for open_buffer in self.opened_buffers.values_mut() {
765 match open_buffer {
766 OpenBuffer::Strong(buffer) => {
767 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
768 }
769 _ => {}
770 }
771 }
772
773 if let Some(project_id) = *remote_id_rx.borrow() {
774 rpc.send(proto::UnshareProject { project_id }).log_err();
775 }
776
777 cx.notify();
778 } else {
779 log::error!("attempted to unshare a remote project");
780 }
781 }
782
783 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
784 if let ProjectClientState::Remote {
785 sharing_has_stopped,
786 ..
787 } = &mut self.client_state
788 {
789 *sharing_has_stopped = true;
790 self.collaborators.clear();
791 cx.notify();
792 }
793 }
794
795 pub fn is_read_only(&self) -> bool {
796 match &self.client_state {
797 ProjectClientState::Local { .. } => false,
798 ProjectClientState::Remote {
799 sharing_has_stopped,
800 ..
801 } => *sharing_has_stopped,
802 }
803 }
804
805 pub fn is_local(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => true,
808 ProjectClientState::Remote { .. } => false,
809 }
810 }
811
812 pub fn is_remote(&self) -> bool {
813 !self.is_local()
814 }
815
816 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
817 if self.is_remote() {
818 return Err(anyhow!("creating buffers as a guest is not supported yet"));
819 }
820
821 let buffer = cx.add_model(|cx| {
822 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
823 });
824 self.register_buffer(&buffer, cx)?;
825 Ok(buffer)
826 }
827
828 pub fn open_path(
829 &mut self,
830 path: impl Into<ProjectPath>,
831 cx: &mut ModelContext<Self>,
832 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
833 let task = self.open_buffer(path, cx);
834 cx.spawn_weak(|_, cx| async move {
835 let buffer = task.await?;
836 let project_entry_id = buffer
837 .read_with(&cx, |buffer, cx| {
838 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
839 })
840 .ok_or_else(|| anyhow!("no project entry"))?;
841 Ok((project_entry_id, buffer.into()))
842 })
843 }
844
845 pub fn open_buffer(
846 &mut self,
847 path: impl Into<ProjectPath>,
848 cx: &mut ModelContext<Self>,
849 ) -> Task<Result<ModelHandle<Buffer>>> {
850 let project_path = path.into();
851 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
852 worktree
853 } else {
854 return Task::ready(Err(anyhow!("no such worktree")));
855 };
856
857 // If there is already a buffer for the given path, then return it.
858 let existing_buffer = self.get_open_buffer(&project_path, cx);
859 if let Some(existing_buffer) = existing_buffer {
860 return Task::ready(Ok(existing_buffer));
861 }
862
863 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
864 // If the given path is already being loaded, then wait for that existing
865 // task to complete and return the same buffer.
866 hash_map::Entry::Occupied(e) => e.get().clone(),
867
868 // Otherwise, record the fact that this path is now being loaded.
869 hash_map::Entry::Vacant(entry) => {
870 let (mut tx, rx) = postage::watch::channel();
871 entry.insert(rx.clone());
872
873 let load_buffer = if worktree.read(cx).is_local() {
874 self.open_local_buffer(&project_path.path, &worktree, cx)
875 } else {
876 self.open_remote_buffer(&project_path.path, &worktree, cx)
877 };
878
879 cx.spawn(move |this, mut cx| async move {
880 let load_result = load_buffer.await;
881 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
882 // Record the fact that the buffer is no longer loading.
883 this.loading_buffers.remove(&project_path);
884 let buffer = load_result.map_err(Arc::new)?;
885 Ok(buffer)
886 }));
887 })
888 .detach();
889 rx
890 }
891 };
892
893 cx.foreground().spawn(async move {
894 loop {
895 if let Some(result) = loading_watch.borrow().as_ref() {
896 match result {
897 Ok(buffer) => return Ok(buffer.clone()),
898 Err(error) => return Err(anyhow!("{}", error)),
899 }
900 }
901 loading_watch.next().await;
902 }
903 })
904 }
905
906 fn open_local_buffer(
907 &mut self,
908 path: &Arc<Path>,
909 worktree: &ModelHandle<Worktree>,
910 cx: &mut ModelContext<Self>,
911 ) -> Task<Result<ModelHandle<Buffer>>> {
912 let load_buffer = worktree.update(cx, |worktree, cx| {
913 let worktree = worktree.as_local_mut().unwrap();
914 worktree.load_buffer(path, cx)
915 });
916 cx.spawn(|this, mut cx| async move {
917 let buffer = load_buffer.await?;
918 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
919 Ok(buffer)
920 })
921 }
922
923 fn open_remote_buffer(
924 &mut self,
925 path: &Arc<Path>,
926 worktree: &ModelHandle<Worktree>,
927 cx: &mut ModelContext<Self>,
928 ) -> Task<Result<ModelHandle<Buffer>>> {
929 let rpc = self.client.clone();
930 let project_id = self.remote_id().unwrap();
931 let remote_worktree_id = worktree.read(cx).id();
932 let path = path.clone();
933 let path_string = path.to_string_lossy().to_string();
934 cx.spawn(|this, mut cx| async move {
935 let response = rpc
936 .request(proto::OpenBufferByPath {
937 project_id,
938 worktree_id: remote_worktree_id.to_proto(),
939 path: path_string,
940 })
941 .await?;
942 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
943 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
944 .await
945 })
946 }
947
948 fn open_local_buffer_via_lsp(
949 &mut self,
950 abs_path: lsp::Url,
951 lsp_adapter: Arc<dyn LspAdapter>,
952 lsp_server: Arc<LanguageServer>,
953 cx: &mut ModelContext<Self>,
954 ) -> Task<Result<ModelHandle<Buffer>>> {
955 cx.spawn(|this, mut cx| async move {
956 let abs_path = abs_path
957 .to_file_path()
958 .map_err(|_| anyhow!("can't convert URI to path"))?;
959 let (worktree, relative_path) = if let Some(result) =
960 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
961 {
962 result
963 } else {
964 let worktree = this
965 .update(&mut cx, |this, cx| {
966 this.create_local_worktree(&abs_path, false, cx)
967 })
968 .await?;
969 this.update(&mut cx, |this, cx| {
970 this.language_servers.insert(
971 (worktree.read(cx).id(), lsp_adapter.name()),
972 (lsp_adapter, lsp_server),
973 );
974 });
975 (worktree, PathBuf::new())
976 };
977
978 let project_path = ProjectPath {
979 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
980 path: relative_path.into(),
981 };
982 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
983 .await
984 })
985 }
986
987 pub fn open_buffer_by_id(
988 &mut self,
989 id: u64,
990 cx: &mut ModelContext<Self>,
991 ) -> Task<Result<ModelHandle<Buffer>>> {
992 if let Some(buffer) = self.buffer_for_id(id, cx) {
993 Task::ready(Ok(buffer))
994 } else if self.is_local() {
995 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
996 } else if let Some(project_id) = self.remote_id() {
997 let request = self
998 .client
999 .request(proto::OpenBufferById { project_id, id });
1000 cx.spawn(|this, mut cx| async move {
1001 let buffer = request
1002 .await?
1003 .buffer
1004 .ok_or_else(|| anyhow!("invalid buffer"))?;
1005 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1006 .await
1007 })
1008 } else {
1009 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1010 }
1011 }
1012
1013 pub fn save_buffer_as(
1014 &mut self,
1015 buffer: ModelHandle<Buffer>,
1016 abs_path: PathBuf,
1017 cx: &mut ModelContext<Project>,
1018 ) -> Task<Result<()>> {
1019 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1020 let old_path =
1021 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1022 cx.spawn(|this, mut cx| async move {
1023 if let Some(old_path) = old_path {
1024 this.update(&mut cx, |this, cx| {
1025 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1026 });
1027 }
1028 let (worktree, path) = worktree_task.await?;
1029 worktree
1030 .update(&mut cx, |worktree, cx| {
1031 worktree
1032 .as_local_mut()
1033 .unwrap()
1034 .save_buffer_as(buffer.clone(), path, cx)
1035 })
1036 .await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.assign_language_to_buffer(&buffer, cx);
1039 this.register_buffer_with_language_server(&buffer, cx);
1040 });
1041 Ok(())
1042 })
1043 }
1044
1045 pub fn get_open_buffer(
1046 &mut self,
1047 path: &ProjectPath,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Option<ModelHandle<Buffer>> {
1050 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1051 self.opened_buffers.values().find_map(|buffer| {
1052 let buffer = buffer.upgrade(cx)?;
1053 let file = File::from_dyn(buffer.read(cx).file())?;
1054 if file.worktree == worktree && file.path() == &path.path {
1055 Some(buffer)
1056 } else {
1057 None
1058 }
1059 })
1060 }
1061
1062 fn register_buffer(
1063 &mut self,
1064 buffer: &ModelHandle<Buffer>,
1065 cx: &mut ModelContext<Self>,
1066 ) -> Result<()> {
1067 let remote_id = buffer.read(cx).remote_id();
1068 let open_buffer = if self.is_remote() || self.is_shared() {
1069 OpenBuffer::Strong(buffer.clone())
1070 } else {
1071 OpenBuffer::Weak(buffer.downgrade())
1072 };
1073
1074 match self.opened_buffers.insert(remote_id, open_buffer) {
1075 None => {}
1076 Some(OpenBuffer::Loading(operations)) => {
1077 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1078 }
1079 Some(OpenBuffer::Weak(existing_handle)) => {
1080 if existing_handle.upgrade(cx).is_some() {
1081 Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?
1085 }
1086 }
1087 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?,
1091 }
1092 cx.subscribe(buffer, |this, buffer, event, cx| {
1093 this.on_buffer_event(buffer, event, cx);
1094 })
1095 .detach();
1096
1097 self.assign_language_to_buffer(buffer, cx);
1098 self.register_buffer_with_language_server(buffer, cx);
1099 cx.observe_release(buffer, |this, buffer, cx| {
1100 if let Some(file) = File::from_dyn(buffer.file()) {
1101 if file.is_local() {
1102 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1103 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1104 server
1105 .notify::<lsp::notification::DidCloseTextDocument>(
1106 lsp::DidCloseTextDocumentParams {
1107 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1108 },
1109 )
1110 .log_err();
1111 }
1112 }
1113 }
1114 })
1115 .detach();
1116
1117 Ok(())
1118 }
1119
1120 fn register_buffer_with_language_server(
1121 &mut self,
1122 buffer_handle: &ModelHandle<Buffer>,
1123 cx: &mut ModelContext<Self>,
1124 ) {
1125 let buffer = buffer_handle.read(cx);
1126 let buffer_id = buffer.remote_id();
1127 if let Some(file) = File::from_dyn(buffer.file()) {
1128 if file.is_local() {
1129 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1130 let initial_snapshot = buffer.text_snapshot();
1131 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1132
1133 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1134 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1135 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1136 .log_err();
1137 }
1138 }
1139
1140 if let Some((_, server)) = language_server {
1141 server
1142 .notify::<lsp::notification::DidOpenTextDocument>(
1143 lsp::DidOpenTextDocumentParams {
1144 text_document: lsp::TextDocumentItem::new(
1145 uri,
1146 Default::default(),
1147 0,
1148 initial_snapshot.text(),
1149 ),
1150 }
1151 .clone(),
1152 )
1153 .log_err();
1154 buffer_handle.update(cx, |buffer, cx| {
1155 buffer.set_completion_triggers(
1156 server
1157 .capabilities()
1158 .completion_provider
1159 .as_ref()
1160 .and_then(|provider| provider.trigger_characters.clone())
1161 .unwrap_or(Vec::new()),
1162 cx,
1163 )
1164 });
1165 self.buffer_snapshots
1166 .insert(buffer_id, vec![(0, initial_snapshot)]);
1167 }
1168 }
1169 }
1170 }
1171
1172 fn unregister_buffer_from_language_server(
1173 &mut self,
1174 buffer: &ModelHandle<Buffer>,
1175 old_path: PathBuf,
1176 cx: &mut ModelContext<Self>,
1177 ) {
1178 buffer.update(cx, |buffer, cx| {
1179 buffer.update_diagnostics(Default::default(), cx);
1180 self.buffer_snapshots.remove(&buffer.remote_id());
1181 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1182 language_server
1183 .notify::<lsp::notification::DidCloseTextDocument>(
1184 lsp::DidCloseTextDocumentParams {
1185 text_document: lsp::TextDocumentIdentifier::new(
1186 lsp::Url::from_file_path(old_path).unwrap(),
1187 ),
1188 },
1189 )
1190 .log_err();
1191 }
1192 });
1193 }
1194
1195 fn on_buffer_event(
1196 &mut self,
1197 buffer: ModelHandle<Buffer>,
1198 event: &BufferEvent,
1199 cx: &mut ModelContext<Self>,
1200 ) -> Option<()> {
1201 match event {
1202 BufferEvent::Operation(operation) => {
1203 let project_id = self.remote_id()?;
1204 let request = self.client.request(proto::UpdateBuffer {
1205 project_id,
1206 buffer_id: buffer.read(cx).remote_id(),
1207 operations: vec![language::proto::serialize_operation(&operation)],
1208 });
1209 cx.background().spawn(request).detach_and_log_err(cx);
1210 }
1211 BufferEvent::Edited { .. } => {
1212 let (_, language_server) = self
1213 .language_server_for_buffer(buffer.read(cx), cx)?
1214 .clone();
1215 let buffer = buffer.read(cx);
1216 let file = File::from_dyn(buffer.file())?;
1217 let abs_path = file.as_local()?.abs_path(cx);
1218 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1219 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1220 let (version, prev_snapshot) = buffer_snapshots.last()?;
1221 let next_snapshot = buffer.text_snapshot();
1222 let next_version = version + 1;
1223
1224 let content_changes = buffer
1225 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1226 .map(|edit| {
1227 let edit_start = edit.new.start.0;
1228 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1229 let new_text = next_snapshot
1230 .text_for_range(edit.new.start.1..edit.new.end.1)
1231 .collect();
1232 lsp::TextDocumentContentChangeEvent {
1233 range: Some(lsp::Range::new(
1234 point_to_lsp(edit_start),
1235 point_to_lsp(edit_end),
1236 )),
1237 range_length: None,
1238 text: new_text,
1239 }
1240 })
1241 .collect();
1242
1243 buffer_snapshots.push((next_version, next_snapshot));
1244
1245 language_server
1246 .notify::<lsp::notification::DidChangeTextDocument>(
1247 lsp::DidChangeTextDocumentParams {
1248 text_document: lsp::VersionedTextDocumentIdentifier::new(
1249 uri,
1250 next_version,
1251 ),
1252 content_changes,
1253 },
1254 )
1255 .log_err();
1256 }
1257 BufferEvent::Saved => {
1258 let file = File::from_dyn(buffer.read(cx).file())?;
1259 let worktree_id = file.worktree_id(cx);
1260 let abs_path = file.as_local()?.abs_path(cx);
1261 let text_document = lsp::TextDocumentIdentifier {
1262 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1263 };
1264
1265 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1266 server
1267 .notify::<lsp::notification::DidSaveTextDocument>(
1268 lsp::DidSaveTextDocumentParams {
1269 text_document: text_document.clone(),
1270 text: None,
1271 },
1272 )
1273 .log_err();
1274 }
1275 }
1276 _ => {}
1277 }
1278
1279 None
1280 }
1281
1282 fn language_servers_for_worktree(
1283 &self,
1284 worktree_id: WorktreeId,
1285 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1286 self.language_servers.iter().filter_map(
1287 move |((language_server_worktree_id, _), server)| {
1288 if *language_server_worktree_id == worktree_id {
1289 Some(server)
1290 } else {
1291 None
1292 }
1293 },
1294 )
1295 }
1296
1297 fn assign_language_to_buffer(
1298 &mut self,
1299 buffer: &ModelHandle<Buffer>,
1300 cx: &mut ModelContext<Self>,
1301 ) -> Option<()> {
1302 // If the buffer has a language, set it and start the language server if we haven't already.
1303 let full_path = buffer.read(cx).file()?.full_path(cx);
1304 let language = self.languages.select_language(&full_path)?;
1305 buffer.update(cx, |buffer, cx| {
1306 buffer.set_language(Some(language.clone()), cx);
1307 });
1308
1309 let file = File::from_dyn(buffer.read(cx).file())?;
1310 let worktree = file.worktree.read(cx).as_local()?;
1311 let worktree_id = worktree.id();
1312 let worktree_abs_path = worktree.abs_path().clone();
1313 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1314
1315 None
1316 }
1317
1318 fn start_language_server(
1319 &mut self,
1320 worktree_id: WorktreeId,
1321 worktree_path: Arc<Path>,
1322 language: Arc<Language>,
1323 cx: &mut ModelContext<Self>,
1324 ) {
1325 let adapter = if let Some(adapter) = language.lsp_adapter() {
1326 adapter
1327 } else {
1328 return;
1329 };
1330 let key = (worktree_id, adapter.name());
1331 self.started_language_servers
1332 .entry(key.clone())
1333 .or_insert_with(|| {
1334 let server_id = post_inc(&mut self.next_language_server_id);
1335 let language_server = self.languages.start_language_server(
1336 server_id,
1337 language.clone(),
1338 worktree_path,
1339 self.client.http_client(),
1340 cx,
1341 );
1342 cx.spawn_weak(|this, mut cx| async move {
1343 let language_server = language_server?.await.log_err()?;
1344 let language_server = language_server
1345 .initialize(adapter.initialization_options())
1346 .await
1347 .log_err()?;
1348 let this = this.upgrade(&cx)?;
1349 let disk_based_diagnostics_progress_token =
1350 adapter.disk_based_diagnostics_progress_token();
1351
1352 language_server
1353 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1354 let this = this.downgrade();
1355 let adapter = adapter.clone();
1356 move |params, mut cx| {
1357 if let Some(this) = this.upgrade(&cx) {
1358 this.update(&mut cx, |this, cx| {
1359 this.on_lsp_diagnostics_published(
1360 server_id,
1361 params,
1362 &adapter,
1363 disk_based_diagnostics_progress_token,
1364 cx,
1365 );
1366 });
1367 }
1368 }
1369 })
1370 .detach();
1371
1372 language_server
1373 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1374 let settings = this
1375 .read_with(&cx, |this, _| this.language_server_settings.clone());
1376 move |params, _| {
1377 let settings = settings.lock().clone();
1378 async move {
1379 Ok(params
1380 .items
1381 .into_iter()
1382 .map(|item| {
1383 if let Some(section) = &item.section {
1384 settings
1385 .get(section)
1386 .cloned()
1387 .unwrap_or(serde_json::Value::Null)
1388 } else {
1389 settings.clone()
1390 }
1391 })
1392 .collect())
1393 }
1394 }
1395 })
1396 .detach();
1397
1398 language_server
1399 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1400 let this = this.downgrade();
1401 let adapter = adapter.clone();
1402 let language_server = language_server.clone();
1403 move |params, cx| {
1404 Self::on_lsp_workspace_edit(
1405 this,
1406 params,
1407 server_id,
1408 adapter.clone(),
1409 language_server.clone(),
1410 cx,
1411 )
1412 }
1413 })
1414 .detach();
1415
1416 language_server
1417 .on_notification::<lsp::notification::Progress, _>({
1418 let this = this.downgrade();
1419 move |params, mut cx| {
1420 if let Some(this) = this.upgrade(&cx) {
1421 this.update(&mut cx, |this, cx| {
1422 this.on_lsp_progress(
1423 params,
1424 server_id,
1425 disk_based_diagnostics_progress_token,
1426 cx,
1427 );
1428 });
1429 }
1430 }
1431 })
1432 .detach();
1433
1434 this.update(&mut cx, |this, cx| {
1435 this.language_servers
1436 .insert(key.clone(), (adapter, language_server.clone()));
1437 this.language_server_statuses.insert(
1438 server_id,
1439 LanguageServerStatus {
1440 name: language_server.name().to_string(),
1441 pending_work: Default::default(),
1442 pending_diagnostic_updates: 0,
1443 },
1444 );
1445 language_server
1446 .notify::<lsp::notification::DidChangeConfiguration>(
1447 lsp::DidChangeConfigurationParams {
1448 settings: this.language_server_settings.lock().clone(),
1449 },
1450 )
1451 .ok();
1452
1453 if let Some(project_id) = this.remote_id() {
1454 this.client
1455 .send(proto::StartLanguageServer {
1456 project_id,
1457 server: Some(proto::LanguageServer {
1458 id: server_id as u64,
1459 name: language_server.name().to_string(),
1460 }),
1461 })
1462 .log_err();
1463 }
1464
1465 // Tell the language server about every open buffer in the worktree that matches the language.
1466 for buffer in this.opened_buffers.values() {
1467 if let Some(buffer_handle) = buffer.upgrade(cx) {
1468 let buffer = buffer_handle.read(cx);
1469 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1470 file
1471 } else {
1472 continue;
1473 };
1474 let language = if let Some(language) = buffer.language() {
1475 language
1476 } else {
1477 continue;
1478 };
1479 if file.worktree.read(cx).id() != key.0
1480 || language.lsp_adapter().map(|a| a.name())
1481 != Some(key.1.clone())
1482 {
1483 continue;
1484 }
1485
1486 let file = file.as_local()?;
1487 let versions = this
1488 .buffer_snapshots
1489 .entry(buffer.remote_id())
1490 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1491 let (version, initial_snapshot) = versions.last().unwrap();
1492 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1493 language_server
1494 .notify::<lsp::notification::DidOpenTextDocument>(
1495 lsp::DidOpenTextDocumentParams {
1496 text_document: lsp::TextDocumentItem::new(
1497 uri,
1498 Default::default(),
1499 *version,
1500 initial_snapshot.text(),
1501 ),
1502 },
1503 )
1504 .log_err()?;
1505 buffer_handle.update(cx, |buffer, cx| {
1506 buffer.set_completion_triggers(
1507 language_server
1508 .capabilities()
1509 .completion_provider
1510 .as_ref()
1511 .and_then(|provider| {
1512 provider.trigger_characters.clone()
1513 })
1514 .unwrap_or(Vec::new()),
1515 cx,
1516 )
1517 });
1518 }
1519 }
1520
1521 cx.notify();
1522 Some(())
1523 });
1524
1525 Some(language_server)
1526 })
1527 });
1528 }
1529
1530 pub fn restart_language_servers_for_buffers(
1531 &mut self,
1532 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1533 cx: &mut ModelContext<Self>,
1534 ) -> Option<()> {
1535 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1536 .into_iter()
1537 .filter_map(|buffer| {
1538 let file = File::from_dyn(buffer.read(cx).file())?;
1539 let worktree = file.worktree.read(cx).as_local()?;
1540 let worktree_id = worktree.id();
1541 let worktree_abs_path = worktree.abs_path().clone();
1542 let full_path = file.full_path(cx);
1543 Some((worktree_id, worktree_abs_path, full_path))
1544 })
1545 .collect();
1546 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1547 let language = self.languages.select_language(&full_path)?;
1548 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1549 }
1550
1551 None
1552 }
1553
1554 fn restart_language_server(
1555 &mut self,
1556 worktree_id: WorktreeId,
1557 worktree_path: Arc<Path>,
1558 language: Arc<Language>,
1559 cx: &mut ModelContext<Self>,
1560 ) {
1561 let adapter = if let Some(adapter) = language.lsp_adapter() {
1562 adapter
1563 } else {
1564 return;
1565 };
1566 let key = (worktree_id, adapter.name());
1567 let server_to_shutdown = self.language_servers.remove(&key);
1568 self.started_language_servers.remove(&key);
1569 server_to_shutdown
1570 .as_ref()
1571 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1572 cx.spawn_weak(|this, mut cx| async move {
1573 if let Some(this) = this.upgrade(&cx) {
1574 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1575 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1576 shutdown_task.await;
1577 }
1578 }
1579
1580 this.update(&mut cx, |this, cx| {
1581 this.start_language_server(worktree_id, worktree_path, language, cx);
1582 });
1583 }
1584 })
1585 .detach();
1586 }
1587
1588 fn on_lsp_diagnostics_published(
1589 &mut self,
1590 server_id: usize,
1591 mut params: lsp::PublishDiagnosticsParams,
1592 adapter: &Arc<dyn LspAdapter>,
1593 disk_based_diagnostics_progress_token: Option<&str>,
1594 cx: &mut ModelContext<Self>,
1595 ) {
1596 adapter.process_diagnostics(&mut params);
1597 if disk_based_diagnostics_progress_token.is_none() {
1598 self.disk_based_diagnostics_started(cx);
1599 self.broadcast_language_server_update(
1600 server_id,
1601 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1602 proto::LspDiskBasedDiagnosticsUpdating {},
1603 ),
1604 );
1605 }
1606 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1607 .log_err();
1608 if disk_based_diagnostics_progress_token.is_none() {
1609 self.disk_based_diagnostics_finished(cx);
1610 self.broadcast_language_server_update(
1611 server_id,
1612 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1613 proto::LspDiskBasedDiagnosticsUpdated {},
1614 ),
1615 );
1616 }
1617 }
1618
1619 fn on_lsp_progress(
1620 &mut self,
1621 progress: lsp::ProgressParams,
1622 server_id: usize,
1623 disk_based_diagnostics_progress_token: Option<&str>,
1624 cx: &mut ModelContext<Self>,
1625 ) {
1626 let token = match progress.token {
1627 lsp::NumberOrString::String(token) => token,
1628 lsp::NumberOrString::Number(token) => {
1629 log::info!("skipping numeric progress token {}", token);
1630 return;
1631 }
1632 };
1633 let progress = match progress.value {
1634 lsp::ProgressParamsValue::WorkDone(value) => value,
1635 };
1636 let language_server_status =
1637 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1638 status
1639 } else {
1640 return;
1641 };
1642 match progress {
1643 lsp::WorkDoneProgress::Begin(_) => {
1644 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1645 language_server_status.pending_diagnostic_updates += 1;
1646 if language_server_status.pending_diagnostic_updates == 1 {
1647 self.disk_based_diagnostics_started(cx);
1648 self.broadcast_language_server_update(
1649 server_id,
1650 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1651 proto::LspDiskBasedDiagnosticsUpdating {},
1652 ),
1653 );
1654 }
1655 } else {
1656 self.on_lsp_work_start(server_id, token.clone(), cx);
1657 self.broadcast_language_server_update(
1658 server_id,
1659 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1660 token,
1661 }),
1662 );
1663 }
1664 }
1665 lsp::WorkDoneProgress::Report(report) => {
1666 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1667 self.on_lsp_work_progress(
1668 server_id,
1669 token.clone(),
1670 LanguageServerProgress {
1671 message: report.message.clone(),
1672 percentage: report.percentage.map(|p| p as usize),
1673 last_update_at: Instant::now(),
1674 },
1675 cx,
1676 );
1677 self.broadcast_language_server_update(
1678 server_id,
1679 proto::update_language_server::Variant::WorkProgress(
1680 proto::LspWorkProgress {
1681 token,
1682 message: report.message,
1683 percentage: report.percentage.map(|p| p as u32),
1684 },
1685 ),
1686 );
1687 }
1688 }
1689 lsp::WorkDoneProgress::End(_) => {
1690 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1691 language_server_status.pending_diagnostic_updates -= 1;
1692 if language_server_status.pending_diagnostic_updates == 0 {
1693 self.disk_based_diagnostics_finished(cx);
1694 self.broadcast_language_server_update(
1695 server_id,
1696 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1697 proto::LspDiskBasedDiagnosticsUpdated {},
1698 ),
1699 );
1700 }
1701 } else {
1702 self.on_lsp_work_end(server_id, token.clone(), cx);
1703 self.broadcast_language_server_update(
1704 server_id,
1705 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1706 token,
1707 }),
1708 );
1709 }
1710 }
1711 }
1712 }
1713
1714 fn on_lsp_work_start(
1715 &mut self,
1716 language_server_id: usize,
1717 token: String,
1718 cx: &mut ModelContext<Self>,
1719 ) {
1720 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1721 status.pending_work.insert(
1722 token,
1723 LanguageServerProgress {
1724 message: None,
1725 percentage: None,
1726 last_update_at: Instant::now(),
1727 },
1728 );
1729 cx.notify();
1730 }
1731 }
1732
1733 fn on_lsp_work_progress(
1734 &mut self,
1735 language_server_id: usize,
1736 token: String,
1737 progress: LanguageServerProgress,
1738 cx: &mut ModelContext<Self>,
1739 ) {
1740 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1741 status.pending_work.insert(token, progress);
1742 cx.notify();
1743 }
1744 }
1745
1746 fn on_lsp_work_end(
1747 &mut self,
1748 language_server_id: usize,
1749 token: String,
1750 cx: &mut ModelContext<Self>,
1751 ) {
1752 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1753 status.pending_work.remove(&token);
1754 cx.notify();
1755 }
1756 }
1757
1758 async fn on_lsp_workspace_edit(
1759 this: WeakModelHandle<Self>,
1760 params: lsp::ApplyWorkspaceEditParams,
1761 server_id: usize,
1762 adapter: Arc<dyn LspAdapter>,
1763 language_server: Arc<LanguageServer>,
1764 mut cx: AsyncAppContext,
1765 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1766 let this = this
1767 .upgrade(&cx)
1768 .ok_or_else(|| anyhow!("project project closed"))?;
1769 let transaction = Self::deserialize_workspace_edit(
1770 this.clone(),
1771 params.edit,
1772 true,
1773 adapter.clone(),
1774 language_server.clone(),
1775 &mut cx,
1776 )
1777 .await
1778 .log_err();
1779 this.update(&mut cx, |this, _| {
1780 if let Some(transaction) = transaction {
1781 this.last_workspace_edits_by_language_server
1782 .insert(server_id, transaction);
1783 }
1784 });
1785 Ok(lsp::ApplyWorkspaceEditResponse {
1786 applied: true,
1787 failed_change: None,
1788 failure_reason: None,
1789 })
1790 }
1791
1792 fn broadcast_language_server_update(
1793 &self,
1794 language_server_id: usize,
1795 event: proto::update_language_server::Variant,
1796 ) {
1797 if let Some(project_id) = self.remote_id() {
1798 self.client
1799 .send(proto::UpdateLanguageServer {
1800 project_id,
1801 language_server_id: language_server_id as u64,
1802 variant: Some(event),
1803 })
1804 .log_err();
1805 }
1806 }
1807
1808 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1809 for (_, server) in self.language_servers.values() {
1810 server
1811 .notify::<lsp::notification::DidChangeConfiguration>(
1812 lsp::DidChangeConfigurationParams {
1813 settings: settings.clone(),
1814 },
1815 )
1816 .ok();
1817 }
1818 *self.language_server_settings.lock() = settings;
1819 }
1820
1821 pub fn language_server_statuses(
1822 &self,
1823 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1824 self.language_server_statuses.values()
1825 }
1826
1827 pub fn update_diagnostics(
1828 &mut self,
1829 params: lsp::PublishDiagnosticsParams,
1830 disk_based_sources: &[&str],
1831 cx: &mut ModelContext<Self>,
1832 ) -> Result<()> {
1833 let abs_path = params
1834 .uri
1835 .to_file_path()
1836 .map_err(|_| anyhow!("URI is not a file"))?;
1837 let mut next_group_id = 0;
1838 let mut diagnostics = Vec::default();
1839 let mut primary_diagnostic_group_ids = HashMap::default();
1840 let mut sources_by_group_id = HashMap::default();
1841 let mut supporting_diagnostics = HashMap::default();
1842 for diagnostic in ¶ms.diagnostics {
1843 let source = diagnostic.source.as_ref();
1844 let code = diagnostic.code.as_ref().map(|code| match code {
1845 lsp::NumberOrString::Number(code) => code.to_string(),
1846 lsp::NumberOrString::String(code) => code.clone(),
1847 });
1848 let range = range_from_lsp(diagnostic.range);
1849 let is_supporting = diagnostic
1850 .related_information
1851 .as_ref()
1852 .map_or(false, |infos| {
1853 infos.iter().any(|info| {
1854 primary_diagnostic_group_ids.contains_key(&(
1855 source,
1856 code.clone(),
1857 range_from_lsp(info.location.range),
1858 ))
1859 })
1860 });
1861
1862 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1863 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1864 });
1865
1866 if is_supporting {
1867 supporting_diagnostics.insert(
1868 (source, code.clone(), range),
1869 (diagnostic.severity, is_unnecessary),
1870 );
1871 } else {
1872 let group_id = post_inc(&mut next_group_id);
1873 let is_disk_based = source.map_or(false, |source| {
1874 disk_based_sources.contains(&source.as_str())
1875 });
1876
1877 sources_by_group_id.insert(group_id, source);
1878 primary_diagnostic_group_ids
1879 .insert((source, code.clone(), range.clone()), group_id);
1880
1881 diagnostics.push(DiagnosticEntry {
1882 range,
1883 diagnostic: Diagnostic {
1884 code: code.clone(),
1885 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1886 message: diagnostic.message.clone(),
1887 group_id,
1888 is_primary: true,
1889 is_valid: true,
1890 is_disk_based,
1891 is_unnecessary,
1892 },
1893 });
1894 if let Some(infos) = &diagnostic.related_information {
1895 for info in infos {
1896 if info.location.uri == params.uri && !info.message.is_empty() {
1897 let range = range_from_lsp(info.location.range);
1898 diagnostics.push(DiagnosticEntry {
1899 range,
1900 diagnostic: Diagnostic {
1901 code: code.clone(),
1902 severity: DiagnosticSeverity::INFORMATION,
1903 message: info.message.clone(),
1904 group_id,
1905 is_primary: false,
1906 is_valid: true,
1907 is_disk_based,
1908 is_unnecessary: false,
1909 },
1910 });
1911 }
1912 }
1913 }
1914 }
1915 }
1916
1917 for entry in &mut diagnostics {
1918 let diagnostic = &mut entry.diagnostic;
1919 if !diagnostic.is_primary {
1920 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1921 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1922 source,
1923 diagnostic.code.clone(),
1924 entry.range.clone(),
1925 )) {
1926 if let Some(severity) = severity {
1927 diagnostic.severity = severity;
1928 }
1929 diagnostic.is_unnecessary = is_unnecessary;
1930 }
1931 }
1932 }
1933
1934 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1935 Ok(())
1936 }
1937
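    /// Records diagnostics for an absolute path: any open buffer for that path is updated
    /// first, then the diagnostics are stored on the containing local worktree and
    /// `Event::DiagnosticsUpdated` is emitted. Paths outside visible worktrees are ignored.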
1938 pub fn update_diagnostic_entries(
1939 &mut self,
1940 abs_path: PathBuf,
1941 version: Option<i32>,
1942 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1943 cx: &mut ModelContext<Project>,
1944 ) -> Result<(), anyhow::Error> {
1945 let (worktree, relative_path) = self
1946 .find_local_worktree(&abs_path, cx)
1947 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1948 if !worktree.read(cx).is_visible() {
1949 return Ok(());
1950 }
1951
1952 let project_path = ProjectPath {
1953 worktree_id: worktree.read(cx).id(),
1954 path: relative_path.into(),
1955 };
1956
1957 for buffer in self.opened_buffers.values() {
1958 if let Some(buffer) = buffer.upgrade(cx) {
1959 if buffer
1960 .read(cx)
1961 .file()
1962 .map_or(false, |file| *file.path() == project_path.path)
1963 {
1964 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1965 break;
1966 }
1967 }
1968 }
1969 worktree.update(cx, |worktree, cx| {
1970 worktree
1971 .as_local_mut()
1972 .ok_or_else(|| anyhow!("not a local worktree"))?
1973 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1974 })?;
1975 cx.emit(Event::DiagnosticsUpdated(project_path));
1976 Ok(())
1977 }
1978
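    /// Sanitizes diagnostics against the buffer snapshot matching the LSP-reported version:
    /// disk-based diagnostics are remapped through unsaved edits, ranges are clipped to the
    /// buffer, and empty ranges are widened to cover at least one character.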
1979 fn update_buffer_diagnostics(
1980 &mut self,
1981 buffer: &ModelHandle<Buffer>,
1982 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1983 version: Option<i32>,
1984 cx: &mut ModelContext<Self>,
1985 ) -> Result<()> {
1986 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1987 Ordering::Equal
1988 .then_with(|| b.is_primary.cmp(&a.is_primary))
1989 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1990 .then_with(|| a.severity.cmp(&b.severity))
1991 .then_with(|| a.message.cmp(&b.message))
1992 }
1993
1994 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1995
1996 diagnostics.sort_unstable_by(|a, b| {
1997 Ordering::Equal
1998 .then_with(|| a.range.start.cmp(&b.range.start))
1999 .then_with(|| b.range.end.cmp(&a.range.end))
2000 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2001 });
2002
2003 let mut sanitized_diagnostics = Vec::new();
2004 let edits_since_save = Patch::new(
2005 snapshot
2006 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2007 .collect(),
2008 );
2009 for entry in diagnostics {
2010 let start;
2011 let end;
2012 if entry.diagnostic.is_disk_based {
2013 // Some diagnostics are based on files on disk instead of buffers'
2014 // current contents. Adjust these diagnostics' ranges to reflect
2015 // any unsaved edits.
2016 start = edits_since_save.old_to_new(entry.range.start);
2017 end = edits_since_save.old_to_new(entry.range.end);
2018 } else {
2019 start = entry.range.start;
2020 end = entry.range.end;
2021 }
2022
2023 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2024 ..snapshot.clip_point_utf16(end, Bias::Right);
2025
2026 // Expand empty ranges by one character
2027 if range.start == range.end {
2028 range.end.column += 1;
2029 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2030 if range.start == range.end && range.end.column > 0 {
2031 range.start.column -= 1;
2032 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2033 }
2034 }
2035
2036 sanitized_diagnostics.push(DiagnosticEntry {
2037 range,
2038 diagnostic: entry.diagnostic,
2039 });
2040 }
2041 drop(edits_since_save);
2042
2043 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2044 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2045 Ok(())
2046 }
2047
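    /// Reloads the given buffers from disk if they are dirty. Remote buffers are reloaded
    /// via a request to the host; local buffers are reloaded directly. The resulting
    /// transactions are collected into a single `ProjectTransaction`.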
2048 pub fn reload_buffers(
2049 &self,
2050 buffers: HashSet<ModelHandle<Buffer>>,
2051 push_to_history: bool,
2052 cx: &mut ModelContext<Self>,
2053 ) -> Task<Result<ProjectTransaction>> {
2054 let mut local_buffers = Vec::new();
2055 let mut remote_buffers = None;
2056 for buffer_handle in buffers {
2057 let buffer = buffer_handle.read(cx);
2058 if buffer.is_dirty() {
2059 if let Some(file) = File::from_dyn(buffer.file()) {
2060 if file.is_local() {
2061 local_buffers.push(buffer_handle);
2062 } else {
2063 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2064 }
2065 }
2066 }
2067 }
2068
2069 let remote_buffers = self.remote_id().zip(remote_buffers);
2070 let client = self.client.clone();
2071
2072 cx.spawn(|this, mut cx| async move {
2073 let mut project_transaction = ProjectTransaction::default();
2074
2075 if let Some((project_id, remote_buffers)) = remote_buffers {
2076 let response = client
2077 .request(proto::ReloadBuffers {
2078 project_id,
2079 buffer_ids: remote_buffers
2080 .iter()
2081 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2082 .collect(),
2083 })
2084 .await?
2085 .transaction
2086 .ok_or_else(|| anyhow!("missing transaction"))?;
2087 project_transaction = this
2088 .update(&mut cx, |this, cx| {
2089 this.deserialize_project_transaction(response, push_to_history, cx)
2090 })
2091 .await?;
2092 }
2093
2094 for buffer in local_buffers {
2095 let transaction = buffer
2096 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2097 .await?;
2098 buffer.update(&mut cx, |buffer, cx| {
2099 if let Some(transaction) = transaction {
2100 if !push_to_history {
2101 buffer.forget_transaction(transaction.id);
2102 }
2103 project_transaction.0.insert(cx.handle(), transaction);
2104 }
2105 });
2106 }
2107
2108 Ok(project_transaction)
2109 })
2110 }
2111
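    /// Formats the given buffers. Remote buffers are formatted via the host; local buffers
    /// use their language server's document formatting, falling back to formatting the
    /// entire buffer as a range when only range formatting is supported.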
2112 pub fn format(
2113 &self,
2114 buffers: HashSet<ModelHandle<Buffer>>,
2115 push_to_history: bool,
2116 cx: &mut ModelContext<Project>,
2117 ) -> Task<Result<ProjectTransaction>> {
2118 let mut local_buffers = Vec::new();
2119 let mut remote_buffers = None;
2120 for buffer_handle in buffers {
2121 let buffer = buffer_handle.read(cx);
2122 if let Some(file) = File::from_dyn(buffer.file()) {
2123 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2124 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2125 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2126 }
2127 } else {
2128 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2129 }
2130 } else {
2131 return Task::ready(Ok(Default::default()));
2132 }
2133 }
2134
2135 let remote_buffers = self.remote_id().zip(remote_buffers);
2136 let client = self.client.clone();
2137
2138 cx.spawn(|this, mut cx| async move {
2139 let mut project_transaction = ProjectTransaction::default();
2140
2141 if let Some((project_id, remote_buffers)) = remote_buffers {
2142 let response = client
2143 .request(proto::FormatBuffers {
2144 project_id,
2145 buffer_ids: remote_buffers
2146 .iter()
2147 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2148 .collect(),
2149 })
2150 .await?
2151 .transaction
2152 .ok_or_else(|| anyhow!("missing transaction"))?;
2153 project_transaction = this
2154 .update(&mut cx, |this, cx| {
2155 this.deserialize_project_transaction(response, push_to_history, cx)
2156 })
2157 .await?;
2158 }
2159
2160 for (buffer, buffer_abs_path, language_server) in local_buffers {
2161 let text_document = lsp::TextDocumentIdentifier::new(
2162 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2163 );
2164 let capabilities = &language_server.capabilities();
2165 let tab_size = cx.update(|cx| {
2166 let language_name = buffer.read(cx).language().map(|language| language.name());
2167 cx.global::<Settings>().tab_size(language_name.as_deref())
2168 });
2169 let lsp_edits = if capabilities
2170 .document_formatting_provider
2171 .as_ref()
2172 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2173 {
2174 language_server
2175 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2176 text_document,
2177 options: lsp::FormattingOptions {
2178 tab_size,
2179 insert_spaces: true,
2180 insert_final_newline: Some(true),
2181 ..Default::default()
2182 },
2183 work_done_progress_params: Default::default(),
2184 })
2185 .await?
2186 } else if capabilities
2187 .document_range_formatting_provider
2188 .as_ref()
2189 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2190 {
2191 let buffer_start = lsp::Position::new(0, 0);
2192 let buffer_end =
2193 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2194 language_server
2195 .request::<lsp::request::RangeFormatting>(
2196 lsp::DocumentRangeFormattingParams {
2197 text_document,
2198 range: lsp::Range::new(buffer_start, buffer_end),
2199 options: lsp::FormattingOptions {
2200                                     tab_size,
2201 insert_spaces: true,
2202 insert_final_newline: Some(true),
2203 ..Default::default()
2204 },
2205 work_done_progress_params: Default::default(),
2206 },
2207 )
2208 .await?
2209 } else {
2210 continue;
2211 };
2212
2213 if let Some(lsp_edits) = lsp_edits {
2214 let edits = this
2215 .update(&mut cx, |this, cx| {
2216 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2217 })
2218 .await?;
2219 buffer.update(&mut cx, |buffer, cx| {
2220 buffer.finalize_last_transaction();
2221 buffer.start_transaction();
2222 for (range, text) in edits {
2223 buffer.edit([range], text, cx);
2224 }
2225 if buffer.end_transaction(cx).is_some() {
2226 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2227 if !push_to_history {
2228 buffer.forget_transaction(transaction.id);
2229 }
2230 project_transaction.0.insert(cx.handle(), transaction);
2231 }
2232 });
2233 }
2234 }
2235
2236 Ok(project_transaction)
2237 })
2238 }
2239
2240 pub fn definition<T: ToPointUtf16>(
2241 &self,
2242 buffer: &ModelHandle<Buffer>,
2243 position: T,
2244 cx: &mut ModelContext<Self>,
2245 ) -> Task<Result<Vec<Location>>> {
2246 let position = position.to_point_utf16(buffer.read(cx));
2247 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2248 }
2249
2250 pub fn references<T: ToPointUtf16>(
2251 &self,
2252 buffer: &ModelHandle<Buffer>,
2253 position: T,
2254 cx: &mut ModelContext<Self>,
2255 ) -> Task<Result<Vec<Location>>> {
2256 let position = position.to_point_utf16(buffer.read(cx));
2257 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2258 }
2259
2260 pub fn document_highlights<T: ToPointUtf16>(
2261 &self,
2262 buffer: &ModelHandle<Buffer>,
2263 position: T,
2264 cx: &mut ModelContext<Self>,
2265 ) -> Task<Result<Vec<DocumentHighlight>>> {
2266 let position = position.to_point_utf16(buffer.read(cx));
2267
2268 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2269 }
2270
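    /// Searches for workspace symbols matching `query` across the language servers of all
    /// local worktrees, or via the host when this project is remote.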
2271 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2272 if self.is_local() {
2273 let mut language_servers = HashMap::default();
2274 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2275 if let Some(worktree) = self
2276 .worktree_for_id(*worktree_id, cx)
2277 .and_then(|worktree| worktree.read(cx).as_local())
2278 {
2279 language_servers
2280 .entry(Arc::as_ptr(language_server))
2281 .or_insert((
2282 lsp_adapter.clone(),
2283 language_server.clone(),
2284 *worktree_id,
2285 worktree.abs_path().clone(),
2286 ));
2287 }
2288 }
2289
2290 let mut requests = Vec::new();
2291 for (_, language_server, _, _) in language_servers.values() {
2292 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2293 lsp::WorkspaceSymbolParams {
2294 query: query.to_string(),
2295 ..Default::default()
2296 },
2297 ));
2298 }
2299
2300 cx.spawn_weak(|this, cx| async move {
2301 let responses = futures::future::try_join_all(requests).await?;
2302
2303 let mut symbols = Vec::new();
2304 if let Some(this) = this.upgrade(&cx) {
2305 this.read_with(&cx, |this, cx| {
2306 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2307 language_servers.into_values().zip(responses)
2308 {
2309 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2310 |lsp_symbol| {
2311 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2312 let mut worktree_id = source_worktree_id;
2313 let path;
2314 if let Some((worktree, rel_path)) =
2315 this.find_local_worktree(&abs_path, cx)
2316 {
2317 worktree_id = worktree.read(cx).id();
2318 path = rel_path;
2319 } else {
2320 path = relativize_path(&worktree_abs_path, &abs_path);
2321 }
2322
2323 let label = this
2324 .languages
2325 .select_language(&path)
2326 .and_then(|language| {
2327 language
2328 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2329 })
2330 .unwrap_or_else(|| {
2331 CodeLabel::plain(lsp_symbol.name.clone(), None)
2332 });
2333 let signature = this.symbol_signature(worktree_id, &path);
2334
2335 Some(Symbol {
2336 source_worktree_id,
2337 worktree_id,
2338 language_server_name: adapter.name(),
2339 name: lsp_symbol.name,
2340 kind: lsp_symbol.kind,
2341 label,
2342 path,
2343 range: range_from_lsp(lsp_symbol.location.range),
2344 signature,
2345 })
2346 },
2347 ));
2348 }
2349 })
2350 }
2351
2352 Ok(symbols)
2353 })
2354 } else if let Some(project_id) = self.remote_id() {
2355 let request = self.client.request(proto::GetProjectSymbols {
2356 project_id,
2357 query: query.to_string(),
2358 });
2359 cx.spawn_weak(|this, cx| async move {
2360 let response = request.await?;
2361 let mut symbols = Vec::new();
2362 if let Some(this) = this.upgrade(&cx) {
2363 this.read_with(&cx, |this, _| {
2364 symbols.extend(
2365 response
2366 .symbols
2367 .into_iter()
2368 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2369 );
2370 })
2371 }
2372 Ok(symbols)
2373 })
2374 } else {
2375 Task::ready(Ok(Default::default()))
2376 }
2377 }
2378
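    /// Opens the buffer containing the given symbol, either through the language server
    /// that produced it or, for remote projects, via a request to the host.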
2379 pub fn open_buffer_for_symbol(
2380 &mut self,
2381 symbol: &Symbol,
2382 cx: &mut ModelContext<Self>,
2383 ) -> Task<Result<ModelHandle<Buffer>>> {
2384 if self.is_local() {
2385 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2386 symbol.source_worktree_id,
2387 symbol.language_server_name.clone(),
2388 )) {
2389 server.clone()
2390 } else {
2391 return Task::ready(Err(anyhow!(
2392 "language server for worktree and language not found"
2393 )));
2394 };
2395
2396 let worktree_abs_path = if let Some(worktree_abs_path) = self
2397 .worktree_for_id(symbol.worktree_id, cx)
2398 .and_then(|worktree| worktree.read(cx).as_local())
2399 .map(|local_worktree| local_worktree.abs_path())
2400 {
2401 worktree_abs_path
2402 } else {
2403 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2404 };
2405 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2406 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2407 uri
2408 } else {
2409 return Task::ready(Err(anyhow!("invalid symbol path")));
2410 };
2411
2412 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2413 } else if let Some(project_id) = self.remote_id() {
2414 let request = self.client.request(proto::OpenBufferForSymbol {
2415 project_id,
2416 symbol: Some(serialize_symbol(symbol)),
2417 });
2418 cx.spawn(|this, mut cx| async move {
2419 let response = request.await?;
2420 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2421 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2422 .await
2423 })
2424 } else {
2425 Task::ready(Err(anyhow!("project does not have a remote id")))
2426 }
2427 }
2428
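    /// Requests completions at the given position from the buffer's language server (or
    /// from the host for remote buffers), converting LSP completion items into `Completion`s
    /// anchored in the buffer. Unsupported insert-and-replace items and items whose edit
    /// ranges don't fit the buffer are skipped.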
2429 pub fn completions<T: ToPointUtf16>(
2430 &self,
2431 source_buffer_handle: &ModelHandle<Buffer>,
2432 position: T,
2433 cx: &mut ModelContext<Self>,
2434 ) -> Task<Result<Vec<Completion>>> {
2435 let source_buffer_handle = source_buffer_handle.clone();
2436 let source_buffer = source_buffer_handle.read(cx);
2437 let buffer_id = source_buffer.remote_id();
2438 let language = source_buffer.language().cloned();
2439 let worktree;
2440 let buffer_abs_path;
2441 if let Some(file) = File::from_dyn(source_buffer.file()) {
2442 worktree = file.worktree.clone();
2443 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2444 } else {
2445 return Task::ready(Ok(Default::default()));
2446 };
2447
2448 let position = position.to_point_utf16(source_buffer);
2449 let anchor = source_buffer.anchor_after(position);
2450
2451 if worktree.read(cx).as_local().is_some() {
2452 let buffer_abs_path = buffer_abs_path.unwrap();
2453 let (_, lang_server) =
2454 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2455 server.clone()
2456 } else {
2457 return Task::ready(Ok(Default::default()));
2458 };
2459
2460 cx.spawn(|_, cx| async move {
2461 let completions = lang_server
2462 .request::<lsp::request::Completion>(lsp::CompletionParams {
2463 text_document_position: lsp::TextDocumentPositionParams::new(
2464 lsp::TextDocumentIdentifier::new(
2465 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2466 ),
2467 point_to_lsp(position),
2468 ),
2469 context: Default::default(),
2470 work_done_progress_params: Default::default(),
2471 partial_result_params: Default::default(),
2472 })
2473 .await
2474 .context("lsp completion request failed")?;
2475
2476 let completions = if let Some(completions) = completions {
2477 match completions {
2478 lsp::CompletionResponse::Array(completions) => completions,
2479 lsp::CompletionResponse::List(list) => list.items,
2480 }
2481 } else {
2482 Default::default()
2483 };
2484
2485 source_buffer_handle.read_with(&cx, |this, _| {
2486 Ok(completions
2487 .into_iter()
2488 .filter_map(|lsp_completion| {
2489 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2490 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2491 (range_from_lsp(edit.range), edit.new_text.clone())
2492 }
2493 None => {
2494 let clipped_position =
2495 this.clip_point_utf16(position, Bias::Left);
2496 if position != clipped_position {
2497 log::info!("completion out of expected range");
2498 return None;
2499 }
2500 (
2501 this.common_prefix_at(
2502 clipped_position,
2503 &lsp_completion.label,
2504 ),
2505 lsp_completion.label.clone(),
2506 )
2507 }
2508 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2509 log::info!("unsupported insert/replace completion");
2510 return None;
2511 }
2512 };
2513
2514 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2515 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2516 if clipped_start == old_range.start && clipped_end == old_range.end {
2517 Some(Completion {
2518 old_range: this.anchor_before(old_range.start)
2519 ..this.anchor_after(old_range.end),
2520 new_text,
2521 label: language
2522 .as_ref()
2523 .and_then(|l| l.label_for_completion(&lsp_completion))
2524 .unwrap_or_else(|| {
2525 CodeLabel::plain(
2526 lsp_completion.label.clone(),
2527 lsp_completion.filter_text.as_deref(),
2528 )
2529 }),
2530 lsp_completion,
2531 })
2532 } else {
2533 log::info!("completion out of expected range");
2534 None
2535 }
2536 })
2537 .collect())
2538 })
2539 })
2540 } else if let Some(project_id) = self.remote_id() {
2541 let rpc = self.client.clone();
2542 let message = proto::GetCompletions {
2543 project_id,
2544 buffer_id,
2545 position: Some(language::proto::serialize_anchor(&anchor)),
2546 version: serialize_version(&source_buffer.version()),
2547 };
2548 cx.spawn_weak(|_, mut cx| async move {
2549 let response = rpc.request(message).await?;
2550
2551 source_buffer_handle
2552 .update(&mut cx, |buffer, _| {
2553 buffer.wait_for_version(deserialize_version(response.version))
2554 })
2555 .await;
2556
2557 response
2558 .completions
2559 .into_iter()
2560 .map(|completion| {
2561 language::proto::deserialize_completion(completion, language.as_ref())
2562 })
2563 .collect()
2564 })
2565 } else {
2566 Task::ready(Ok(Default::default()))
2567 }
2568 }
2569
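    /// Resolves the given completion and applies any `additionalTextEdits` it carries (such
    /// as auto-import insertions) to the buffer in a single transaction.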
2570 pub fn apply_additional_edits_for_completion(
2571 &self,
2572 buffer_handle: ModelHandle<Buffer>,
2573 completion: Completion,
2574 push_to_history: bool,
2575 cx: &mut ModelContext<Self>,
2576 ) -> Task<Result<Option<Transaction>>> {
2577 let buffer = buffer_handle.read(cx);
2578 let buffer_id = buffer.remote_id();
2579
2580 if self.is_local() {
2581 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2582 {
2583 server.clone()
2584 } else {
2585 return Task::ready(Ok(Default::default()));
2586 };
2587
2588 cx.spawn(|this, mut cx| async move {
2589 let resolved_completion = lang_server
2590 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2591 .await?;
2592 if let Some(edits) = resolved_completion.additional_text_edits {
2593 let edits = this
2594 .update(&mut cx, |this, cx| {
2595 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2596 })
2597 .await?;
2598 buffer_handle.update(&mut cx, |buffer, cx| {
2599 buffer.finalize_last_transaction();
2600 buffer.start_transaction();
2601 for (range, text) in edits {
2602 buffer.edit([range], text, cx);
2603 }
2604 let transaction = if buffer.end_transaction(cx).is_some() {
2605 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2606 if !push_to_history {
2607 buffer.forget_transaction(transaction.id);
2608 }
2609 Some(transaction)
2610 } else {
2611 None
2612 };
2613 Ok(transaction)
2614 })
2615 } else {
2616 Ok(None)
2617 }
2618 })
2619 } else if let Some(project_id) = self.remote_id() {
2620 let client = self.client.clone();
2621 cx.spawn(|_, mut cx| async move {
2622 let response = client
2623 .request(proto::ApplyCompletionAdditionalEdits {
2624 project_id,
2625 buffer_id,
2626 completion: Some(language::proto::serialize_completion(&completion)),
2627 })
2628 .await?;
2629
2630 if let Some(transaction) = response.transaction {
2631 let transaction = language::proto::deserialize_transaction(transaction)?;
2632 buffer_handle
2633 .update(&mut cx, |buffer, _| {
2634 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2635 })
2636 .await;
2637 if push_to_history {
2638 buffer_handle.update(&mut cx, |buffer, _| {
2639 buffer.push_transaction(transaction.clone(), Instant::now());
2640 });
2641 }
2642 Ok(Some(transaction))
2643 } else {
2644 Ok(None)
2645 }
2646 })
2647 } else {
2648 Task::ready(Err(anyhow!("project does not have a remote id")))
2649 }
2650 }
2651
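    /// Requests code actions for the given range from the buffer's language server (or the
    /// host), limited to quickfix, refactor, refactor-extract, and source actions.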
2652 pub fn code_actions<T: Clone + ToOffset>(
2653 &self,
2654 buffer_handle: &ModelHandle<Buffer>,
2655 range: Range<T>,
2656 cx: &mut ModelContext<Self>,
2657 ) -> Task<Result<Vec<CodeAction>>> {
2658 let buffer_handle = buffer_handle.clone();
2659 let buffer = buffer_handle.read(cx);
2660 let snapshot = buffer.snapshot();
2661 let relevant_diagnostics = snapshot
2662 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2663 .map(|entry| entry.to_lsp_diagnostic_stub())
2664 .collect();
2665 let buffer_id = buffer.remote_id();
2666 let worktree;
2667 let buffer_abs_path;
2668 if let Some(file) = File::from_dyn(buffer.file()) {
2669 worktree = file.worktree.clone();
2670 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2671 } else {
2672 return Task::ready(Ok(Default::default()));
2673 };
2674 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2675
2676 if worktree.read(cx).as_local().is_some() {
2677 let buffer_abs_path = buffer_abs_path.unwrap();
2678 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2679 {
2680 server.clone()
2681 } else {
2682 return Task::ready(Ok(Default::default()));
2683 };
2684
2685 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2686 cx.foreground().spawn(async move {
2687                 if lang_server.capabilities().code_action_provider.is_none() {
2688 return Ok(Default::default());
2689 }
2690
2691 Ok(lang_server
2692 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2693 text_document: lsp::TextDocumentIdentifier::new(
2694 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2695 ),
2696 range: lsp_range,
2697 work_done_progress_params: Default::default(),
2698 partial_result_params: Default::default(),
2699 context: lsp::CodeActionContext {
2700 diagnostics: relevant_diagnostics,
2701 only: Some(vec![
2702 lsp::CodeActionKind::QUICKFIX,
2703 lsp::CodeActionKind::REFACTOR,
2704 lsp::CodeActionKind::REFACTOR_EXTRACT,
2705 lsp::CodeActionKind::SOURCE,
2706 ]),
2707 },
2708 })
2709 .await?
2710 .unwrap_or_default()
2711 .into_iter()
2712 .filter_map(|entry| {
2713 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2714 Some(CodeAction {
2715 range: range.clone(),
2716 lsp_action,
2717 })
2718 } else {
2719 None
2720 }
2721 })
2722 .collect())
2723 })
2724 } else if let Some(project_id) = self.remote_id() {
2725 let rpc = self.client.clone();
2726 let version = buffer.version();
2727 cx.spawn_weak(|_, mut cx| async move {
2728 let response = rpc
2729 .request(proto::GetCodeActions {
2730 project_id,
2731 buffer_id,
2732 start: Some(language::proto::serialize_anchor(&range.start)),
2733 end: Some(language::proto::serialize_anchor(&range.end)),
2734 version: serialize_version(&version),
2735 })
2736 .await?;
2737
2738 buffer_handle
2739 .update(&mut cx, |buffer, _| {
2740 buffer.wait_for_version(deserialize_version(response.version))
2741 })
2742 .await;
2743
2744 response
2745 .actions
2746 .into_iter()
2747 .map(language::proto::deserialize_code_action)
2748 .collect()
2749 })
2750 } else {
2751 Task::ready(Ok(Default::default()))
2752 }
2753 }
2754
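    /// Applies a code action: the action is first resolved (or re-requested when the server
    /// doesn't support resolution), then its workspace edit is applied or its command
    /// executed, returning the resulting `ProjectTransaction`.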
2755 pub fn apply_code_action(
2756 &self,
2757 buffer_handle: ModelHandle<Buffer>,
2758 mut action: CodeAction,
2759 push_to_history: bool,
2760 cx: &mut ModelContext<Self>,
2761 ) -> Task<Result<ProjectTransaction>> {
2762 if self.is_local() {
2763 let buffer = buffer_handle.read(cx);
2764 let (lsp_adapter, lang_server) =
2765 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2766 server.clone()
2767 } else {
2768 return Task::ready(Ok(Default::default()));
2769 };
2770 let range = action.range.to_point_utf16(buffer);
2771
2772 cx.spawn(|this, mut cx| async move {
2773 if let Some(lsp_range) = action
2774 .lsp_action
2775 .data
2776 .as_mut()
2777 .and_then(|d| d.get_mut("codeActionParams"))
2778 .and_then(|d| d.get_mut("range"))
2779 {
2780 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2781 action.lsp_action = lang_server
2782 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2783 .await?;
2784 } else {
2785 let actions = this
2786 .update(&mut cx, |this, cx| {
2787 this.code_actions(&buffer_handle, action.range, cx)
2788 })
2789 .await?;
2790 action.lsp_action = actions
2791 .into_iter()
2792 .find(|a| a.lsp_action.title == action.lsp_action.title)
2793 .ok_or_else(|| anyhow!("code action is outdated"))?
2794 .lsp_action;
2795 }
2796
2797 if let Some(edit) = action.lsp_action.edit {
2798 Self::deserialize_workspace_edit(
2799 this,
2800 edit,
2801 push_to_history,
2802 lsp_adapter,
2803 lang_server,
2804 &mut cx,
2805 )
2806 .await
2807 } else if let Some(command) = action.lsp_action.command {
2808 this.update(&mut cx, |this, _| {
2809 this.last_workspace_edits_by_language_server
2810 .remove(&lang_server.server_id());
2811 });
2812 lang_server
2813 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2814 command: command.command,
2815 arguments: command.arguments.unwrap_or_default(),
2816 ..Default::default()
2817 })
2818 .await?;
2819 Ok(this.update(&mut cx, |this, _| {
2820 this.last_workspace_edits_by_language_server
2821 .remove(&lang_server.server_id())
2822 .unwrap_or_default()
2823 }))
2824 } else {
2825 Ok(ProjectTransaction::default())
2826 }
2827 })
2828 } else if let Some(project_id) = self.remote_id() {
2829 let client = self.client.clone();
2830 let request = proto::ApplyCodeAction {
2831 project_id,
2832 buffer_id: buffer_handle.read(cx).remote_id(),
2833 action: Some(language::proto::serialize_code_action(&action)),
2834 };
2835 cx.spawn(|this, mut cx| async move {
2836 let response = client
2837 .request(request)
2838 .await?
2839 .transaction
2840 .ok_or_else(|| anyhow!("missing transaction"))?;
2841 this.update(&mut cx, |this, cx| {
2842 this.deserialize_project_transaction(response, push_to_history, cx)
2843 })
2844 .await
2845 })
2846 } else {
2847 Task::ready(Err(anyhow!("project does not have a remote id")))
2848 }
2849 }
2850
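    /// Applies an LSP `WorkspaceEdit` to the project: resource operations create, rename,
    /// and delete files on disk, while text edits are applied through buffers opened via
    /// the originating language server, collecting one transaction per edited buffer.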
2851 async fn deserialize_workspace_edit(
2852 this: ModelHandle<Self>,
2853 edit: lsp::WorkspaceEdit,
2854 push_to_history: bool,
2855 lsp_adapter: Arc<dyn LspAdapter>,
2856 language_server: Arc<LanguageServer>,
2857 cx: &mut AsyncAppContext,
2858 ) -> Result<ProjectTransaction> {
2859 let fs = this.read_with(cx, |this, _| this.fs.clone());
2860 let mut operations = Vec::new();
2861 if let Some(document_changes) = edit.document_changes {
2862 match document_changes {
2863 lsp::DocumentChanges::Edits(edits) => {
2864 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2865 }
2866 lsp::DocumentChanges::Operations(ops) => operations = ops,
2867 }
2868 } else if let Some(changes) = edit.changes {
2869 operations.extend(changes.into_iter().map(|(uri, edits)| {
2870 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2871 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2872 uri,
2873 version: None,
2874 },
2875 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2876 })
2877 }));
2878 }
2879
2880 let mut project_transaction = ProjectTransaction::default();
2881 for operation in operations {
2882 match operation {
2883 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2884 let abs_path = op
2885 .uri
2886 .to_file_path()
2887 .map_err(|_| anyhow!("can't convert URI to path"))?;
2888
2889 if let Some(parent_path) = abs_path.parent() {
2890 fs.create_dir(parent_path).await?;
2891 }
2892 if abs_path.ends_with("/") {
2893 fs.create_dir(&abs_path).await?;
2894 } else {
2895 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2896 .await?;
2897 }
2898 }
2899 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2900 let source_abs_path = op
2901 .old_uri
2902 .to_file_path()
2903 .map_err(|_| anyhow!("can't convert URI to path"))?;
2904 let target_abs_path = op
2905 .new_uri
2906 .to_file_path()
2907 .map_err(|_| anyhow!("can't convert URI to path"))?;
2908 fs.rename(
2909 &source_abs_path,
2910 &target_abs_path,
2911 op.options.map(Into::into).unwrap_or_default(),
2912 )
2913 .await?;
2914 }
2915 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2916 let abs_path = op
2917 .uri
2918 .to_file_path()
2919 .map_err(|_| anyhow!("can't convert URI to path"))?;
2920 let options = op.options.map(Into::into).unwrap_or_default();
2921 if abs_path.ends_with("/") {
2922 fs.remove_dir(&abs_path, options).await?;
2923 } else {
2924 fs.remove_file(&abs_path, options).await?;
2925 }
2926 }
2927 lsp::DocumentChangeOperation::Edit(op) => {
2928 let buffer_to_edit = this
2929 .update(cx, |this, cx| {
2930 this.open_local_buffer_via_lsp(
2931 op.text_document.uri,
2932 lsp_adapter.clone(),
2933 language_server.clone(),
2934 cx,
2935 )
2936 })
2937 .await?;
2938
2939 let edits = this
2940 .update(cx, |this, cx| {
2941 let edits = op.edits.into_iter().map(|edit| match edit {
2942 lsp::OneOf::Left(edit) => edit,
2943 lsp::OneOf::Right(edit) => edit.text_edit,
2944 });
2945 this.edits_from_lsp(
2946 &buffer_to_edit,
2947 edits,
2948 op.text_document.version,
2949 cx,
2950 )
2951 })
2952 .await?;
2953
2954 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2955 buffer.finalize_last_transaction();
2956 buffer.start_transaction();
2957 for (range, text) in edits {
2958 buffer.edit([range], text, cx);
2959 }
2960 let transaction = if buffer.end_transaction(cx).is_some() {
2961 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2962 if !push_to_history {
2963 buffer.forget_transaction(transaction.id);
2964 }
2965 Some(transaction)
2966 } else {
2967 None
2968 };
2969
2970 transaction
2971 });
2972 if let Some(transaction) = transaction {
2973 project_transaction.0.insert(buffer_to_edit, transaction);
2974 }
2975 }
2976 }
2977 }
2978
2979 Ok(project_transaction)
2980 }
2981
2982 pub fn prepare_rename<T: ToPointUtf16>(
2983 &self,
2984 buffer: ModelHandle<Buffer>,
2985 position: T,
2986 cx: &mut ModelContext<Self>,
2987 ) -> Task<Result<Option<Range<Anchor>>>> {
2988 let position = position.to_point_utf16(buffer.read(cx));
2989 self.request_lsp(buffer, PrepareRename { position }, cx)
2990 }
2991
2992 pub fn perform_rename<T: ToPointUtf16>(
2993 &self,
2994 buffer: ModelHandle<Buffer>,
2995 position: T,
2996 new_name: String,
2997 push_to_history: bool,
2998 cx: &mut ModelContext<Self>,
2999 ) -> Task<Result<ProjectTransaction>> {
3000 let position = position.to_point_utf16(buffer.read(cx));
3001 self.request_lsp(
3002 buffer,
3003 PerformRename {
3004 position,
3005 new_name,
3006 push_to_history,
3007 },
3008 cx,
3009 )
3010 }
3011
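    /// Performs a project-wide search. Locally, already-open buffers plus any visible file
    /// whose contents match the query are searched on background threads, yielding the
    /// matched ranges per buffer; remote projects forward the query to the host.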
3012 pub fn search(
3013 &self,
3014 query: SearchQuery,
3015 cx: &mut ModelContext<Self>,
3016 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3017 if self.is_local() {
3018 let snapshots = self
3019 .visible_worktrees(cx)
3020 .filter_map(|tree| {
3021 let tree = tree.read(cx).as_local()?;
3022 Some(tree.snapshot())
3023 })
3024 .collect::<Vec<_>>();
3025
3026 let background = cx.background().clone();
3027 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3028 if path_count == 0 {
3029 return Task::ready(Ok(Default::default()));
3030 }
3031 let workers = background.num_cpus().min(path_count);
3032 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3033 cx.background()
3034 .spawn({
3035 let fs = self.fs.clone();
3036 let background = cx.background().clone();
3037 let query = query.clone();
3038 async move {
3039 let fs = &fs;
3040 let query = &query;
3041 let matching_paths_tx = &matching_paths_tx;
3042 let paths_per_worker = (path_count + workers - 1) / workers;
3043 let snapshots = &snapshots;
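                        // Each worker scans a disjoint, contiguous slice of the concatenated
                        // list of visible files across all worktree snapshots, sending any
                        // path whose contents match the query.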
3044 background
3045 .scoped(|scope| {
3046 for worker_ix in 0..workers {
3047 let worker_start_ix = worker_ix * paths_per_worker;
3048 let worker_end_ix = worker_start_ix + paths_per_worker;
3049 scope.spawn(async move {
3050 let mut snapshot_start_ix = 0;
3051 let mut abs_path = PathBuf::new();
3052 for snapshot in snapshots {
3053 let snapshot_end_ix =
3054 snapshot_start_ix + snapshot.visible_file_count();
3055 if worker_end_ix <= snapshot_start_ix {
3056 break;
3057 } else if worker_start_ix > snapshot_end_ix {
3058 snapshot_start_ix = snapshot_end_ix;
3059 continue;
3060 } else {
3061 let start_in_snapshot = worker_start_ix
3062 .saturating_sub(snapshot_start_ix);
3063 let end_in_snapshot =
3064 cmp::min(worker_end_ix, snapshot_end_ix)
3065 - snapshot_start_ix;
3066
3067 for entry in snapshot
3068 .files(false, start_in_snapshot)
3069 .take(end_in_snapshot - start_in_snapshot)
3070 {
3071 if matching_paths_tx.is_closed() {
3072 break;
3073 }
3074
3075 abs_path.clear();
3076 abs_path.push(&snapshot.abs_path());
3077 abs_path.push(&entry.path);
3078 let matches = if let Some(file) =
3079 fs.open_sync(&abs_path).await.log_err()
3080 {
3081 query.detect(file).unwrap_or(false)
3082 } else {
3083 false
3084 };
3085
3086 if matches {
3087 let project_path =
3088 (snapshot.id(), entry.path.clone());
3089 if matching_paths_tx
3090 .send(project_path)
3091 .await
3092 .is_err()
3093 {
3094 break;
3095 }
3096 }
3097 }
3098
3099 snapshot_start_ix = snapshot_end_ix;
3100 }
3101 }
3102 });
3103 }
3104 })
3105 .await;
3106 }
3107 })
3108 .detach();
3109
3110 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3111 let open_buffers = self
3112 .opened_buffers
3113 .values()
3114 .filter_map(|b| b.upgrade(cx))
3115 .collect::<HashSet<_>>();
3116 cx.spawn(|this, cx| async move {
3117 for buffer in &open_buffers {
3118 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3119 buffers_tx.send((buffer.clone(), snapshot)).await?;
3120 }
3121
3122 let open_buffers = Rc::new(RefCell::new(open_buffers));
3123 while let Some(project_path) = matching_paths_rx.next().await {
3124 if buffers_tx.is_closed() {
3125 break;
3126 }
3127
3128 let this = this.clone();
3129 let open_buffers = open_buffers.clone();
3130 let buffers_tx = buffers_tx.clone();
3131 cx.spawn(|mut cx| async move {
3132 if let Some(buffer) = this
3133 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3134 .await
3135 .log_err()
3136 {
3137 if open_buffers.borrow_mut().insert(buffer.clone()) {
3138 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3139 buffers_tx.send((buffer, snapshot)).await?;
3140 }
3141 }
3142
3143 Ok::<_, anyhow::Error>(())
3144 })
3145 .detach();
3146 }
3147
3148 Ok::<_, anyhow::Error>(())
3149 })
3150 .detach_and_log_err(cx);
3151
3152 let background = cx.background().clone();
3153 cx.background().spawn(async move {
3154 let query = &query;
3155 let mut matched_buffers = Vec::new();
3156 for _ in 0..workers {
3157 matched_buffers.push(HashMap::default());
3158 }
3159 background
3160 .scoped(|scope| {
3161 for worker_matched_buffers in matched_buffers.iter_mut() {
3162 let mut buffers_rx = buffers_rx.clone();
3163 scope.spawn(async move {
3164 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3165 let buffer_matches = query
3166 .search(snapshot.as_rope())
3167 .await
3168 .iter()
3169 .map(|range| {
3170 snapshot.anchor_before(range.start)
3171 ..snapshot.anchor_after(range.end)
3172 })
3173 .collect::<Vec<_>>();
3174 if !buffer_matches.is_empty() {
3175 worker_matched_buffers
3176 .insert(buffer.clone(), buffer_matches);
3177 }
3178 }
3179 });
3180 }
3181 })
3182 .await;
3183 Ok(matched_buffers.into_iter().flatten().collect())
3184 })
3185 } else if let Some(project_id) = self.remote_id() {
3186 let request = self.client.request(query.to_proto(project_id));
3187 cx.spawn(|this, mut cx| async move {
3188 let response = request.await?;
3189 let mut result = HashMap::default();
3190 for location in response.locations {
3191 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3192 let target_buffer = this
3193 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3194 .await?;
3195 let start = location
3196 .start
3197 .and_then(deserialize_anchor)
3198 .ok_or_else(|| anyhow!("missing target start"))?;
3199 let end = location
3200 .end
3201 .and_then(deserialize_anchor)
3202 .ok_or_else(|| anyhow!("missing target end"))?;
3203 result
3204 .entry(target_buffer)
3205 .or_insert(Vec::new())
3206 .push(start..end)
3207 }
3208 Ok(result)
3209 })
3210 } else {
3211 Task::ready(Ok(Default::default()))
3212 }
3213 }
3214
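    /// Dispatches an `LspCommand` either to the buffer's local language server or, for
    /// remote projects, over RPC to the host. Returns a default response when neither is
    /// available or the server lacks the required capability.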
3215 fn request_lsp<R: LspCommand>(
3216 &self,
3217 buffer_handle: ModelHandle<Buffer>,
3218 request: R,
3219 cx: &mut ModelContext<Self>,
3220 ) -> Task<Result<R::Response>>
3221 where
3222 <R::LspRequest as lsp::request::Request>::Result: Send,
3223 {
3224 let buffer = buffer_handle.read(cx);
3225 if self.is_local() {
3226 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3227 if let Some((file, (_, language_server))) =
3228 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3229 {
3230 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3231 return cx.spawn(|this, cx| async move {
3232 if !request.check_capabilities(&language_server.capabilities()) {
3233 return Ok(Default::default());
3234 }
3235
3236 let response = language_server
3237 .request::<R::LspRequest>(lsp_params)
3238 .await
3239 .context("lsp request failed")?;
3240 request
3241 .response_from_lsp(response, this, buffer_handle, cx)
3242 .await
3243 });
3244 }
3245 } else if let Some(project_id) = self.remote_id() {
3246 let rpc = self.client.clone();
3247 let message = request.to_proto(project_id, buffer);
3248 return cx.spawn(|this, cx| async move {
3249 let response = rpc.request(message).await?;
3250 request
3251 .response_from_proto(response, this, buffer_handle, cx)
3252 .await
3253 });
3254 }
3255 Task::ready(Ok(Default::default()))
3256 }
3257
3258 pub fn find_or_create_local_worktree(
3259 &mut self,
3260 abs_path: impl AsRef<Path>,
3261 visible: bool,
3262 cx: &mut ModelContext<Self>,
3263 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3264 let abs_path = abs_path.as_ref();
3265 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3266 Task::ready(Ok((tree.clone(), relative_path.into())))
3267 } else {
3268 let worktree = self.create_local_worktree(abs_path, visible, cx);
3269 cx.foreground()
3270 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3271 }
3272 }
3273
3274 pub fn find_local_worktree(
3275 &self,
3276 abs_path: &Path,
3277 cx: &AppContext,
3278 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3279 for tree in self.worktrees(cx) {
3280 if let Some(relative_path) = tree
3281 .read(cx)
3282 .as_local()
3283 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3284 {
3285 return Some((tree.clone(), relative_path.into()));
3286 }
3287 }
3288 None
3289 }
3290
3291 pub fn is_shared(&self) -> bool {
3292 match &self.client_state {
3293 ProjectClientState::Local { is_shared, .. } => *is_shared,
3294 ProjectClientState::Remote { .. } => false,
3295 }
3296 }
3297
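    /// Starts loading a local worktree for the given path, deduplicating concurrent requests
    /// for the same path. Once loaded, the worktree is added to the project and registered
    /// or shared with the remote project if one exists.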
3298 fn create_local_worktree(
3299 &mut self,
3300 abs_path: impl AsRef<Path>,
3301 visible: bool,
3302 cx: &mut ModelContext<Self>,
3303 ) -> Task<Result<ModelHandle<Worktree>>> {
3304 let fs = self.fs.clone();
3305 let client = self.client.clone();
3306 let next_entry_id = self.next_entry_id.clone();
3307 let path: Arc<Path> = abs_path.as_ref().into();
3308 let task = self
3309 .loading_local_worktrees
3310 .entry(path.clone())
3311 .or_insert_with(|| {
3312 cx.spawn(|project, mut cx| {
3313 async move {
3314 let worktree = Worktree::local(
3315 client.clone(),
3316 path.clone(),
3317 visible,
3318 fs,
3319 next_entry_id,
3320 &mut cx,
3321 )
3322 .await;
3323 project.update(&mut cx, |project, _| {
3324 project.loading_local_worktrees.remove(&path);
3325 });
3326 let worktree = worktree?;
3327
3328 let (remote_project_id, is_shared) =
3329 project.update(&mut cx, |project, cx| {
3330 project.add_worktree(&worktree, cx);
3331 (project.remote_id(), project.is_shared())
3332 });
3333
3334 if let Some(project_id) = remote_project_id {
3335 if is_shared {
3336 worktree
3337 .update(&mut cx, |worktree, cx| {
3338 worktree.as_local_mut().unwrap().share(project_id, cx)
3339 })
3340 .await?;
3341 } else {
3342 worktree
3343 .update(&mut cx, |worktree, cx| {
3344 worktree.as_local_mut().unwrap().register(project_id, cx)
3345 })
3346 .await?;
3347 }
3348 }
3349
3350 Ok(worktree)
3351 }
3352                     .map_err(Arc::new)
3353 })
3354 .shared()
3355 })
3356 .clone();
3357 cx.foreground().spawn(async move {
3358 match task.await {
3359 Ok(worktree) => Ok(worktree),
3360 Err(err) => Err(anyhow!("{}", err)),
3361 }
3362 })
3363 }
3364
3365 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3366 self.worktrees.retain(|worktree| {
3367 worktree
3368 .upgrade(cx)
3369 .map_or(false, |w| w.read(cx).id() != id)
3370 });
3371 cx.notify();
3372 }
3373
3374 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3375 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3376 if worktree.read(cx).is_local() {
3377 cx.subscribe(&worktree, |this, worktree, _, cx| {
3378 this.update_local_worktree_buffers(worktree, cx);
3379 })
3380 .detach();
3381 }
3382
3383 let push_strong_handle = {
3384 let worktree = worktree.read(cx);
3385 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3386 };
3387 if push_strong_handle {
3388 self.worktrees
3389 .push(WorktreeHandle::Strong(worktree.clone()));
3390 } else {
3391 cx.observe_release(&worktree, |this, _, cx| {
3392 this.worktrees
3393 .retain(|worktree| worktree.upgrade(cx).is_some());
3394 cx.notify();
3395 })
3396 .detach();
3397 self.worktrees
3398 .push(WorktreeHandle::Weak(worktree.downgrade()));
3399 }
3400 cx.notify();
3401 }
3402
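    /// Reconciles open buffers with a local worktree after it changes: buffer files are
    /// updated to point at their current entries, remote peers are notified, and buffers
    /// whose paths changed are re-registered with their language servers.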
3403 fn update_local_worktree_buffers(
3404 &mut self,
3405 worktree_handle: ModelHandle<Worktree>,
3406 cx: &mut ModelContext<Self>,
3407 ) {
3408 let snapshot = worktree_handle.read(cx).snapshot();
3409 let mut buffers_to_delete = Vec::new();
3410 let mut renamed_buffers = Vec::new();
3411 for (buffer_id, buffer) in &self.opened_buffers {
3412 if let Some(buffer) = buffer.upgrade(cx) {
3413 buffer.update(cx, |buffer, cx| {
3414 if let Some(old_file) = File::from_dyn(buffer.file()) {
3415 if old_file.worktree != worktree_handle {
3416 return;
3417 }
3418
3419 let new_file = if let Some(entry) = old_file
3420 .entry_id
3421 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3422 {
3423 File {
3424 is_local: true,
3425 entry_id: Some(entry.id),
3426 mtime: entry.mtime,
3427 path: entry.path.clone(),
3428 worktree: worktree_handle.clone(),
3429 }
3430 } else if let Some(entry) =
3431 snapshot.entry_for_path(old_file.path().as_ref())
3432 {
3433 File {
3434 is_local: true,
3435 entry_id: Some(entry.id),
3436 mtime: entry.mtime,
3437 path: entry.path.clone(),
3438 worktree: worktree_handle.clone(),
3439 }
3440 } else {
3441 File {
3442 is_local: true,
3443 entry_id: None,
3444 path: old_file.path().clone(),
3445 mtime: old_file.mtime(),
3446 worktree: worktree_handle.clone(),
3447 }
3448 };
3449
3450 let old_path = old_file.abs_path(cx);
3451 if new_file.abs_path(cx) != old_path {
3452 renamed_buffers.push((cx.handle(), old_path));
3453 }
3454
3455 if let Some(project_id) = self.remote_id() {
3456 self.client
3457 .send(proto::UpdateBufferFile {
3458 project_id,
3459 buffer_id: *buffer_id as u64,
3460 file: Some(new_file.to_proto()),
3461 })
3462 .log_err();
3463 }
3464 buffer.file_updated(Box::new(new_file), cx).detach();
3465 }
3466 });
3467 } else {
3468 buffers_to_delete.push(*buffer_id);
3469 }
3470 }
3471
3472 for buffer_id in buffers_to_delete {
3473 self.opened_buffers.remove(&buffer_id);
3474 }
3475
3476 for (buffer, old_path) in renamed_buffers {
3477 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3478 self.assign_language_to_buffer(&buffer, cx);
3479 self.register_buffer_with_language_server(&buffer, cx);
3480 }
3481 }
3482
3483 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3484 let new_active_entry = entry.and_then(|project_path| {
3485 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3486 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3487 Some(entry.id)
3488 });
3489 if new_active_entry != self.active_entry {
3490 self.active_entry = new_active_entry;
3491 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3492 }
3493 }
3494
3495 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3496 self.language_server_statuses
3497 .values()
3498 .any(|status| status.pending_diagnostic_updates > 0)
3499 }
3500
3501 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3502 let mut summary = DiagnosticSummary::default();
3503 for (_, path_summary) in self.diagnostic_summaries(cx) {
3504 summary.error_count += path_summary.error_count;
3505 summary.warning_count += path_summary.warning_count;
3506 summary.info_count += path_summary.info_count;
3507 summary.hint_count += path_summary.hint_count;
3508 }
3509 summary
3510 }
3511
3512 pub fn diagnostic_summaries<'a>(
3513 &'a self,
3514 cx: &'a AppContext,
3515 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3516 self.worktrees(cx).flat_map(move |worktree| {
3517 let worktree = worktree.read(cx);
3518 let worktree_id = worktree.id();
3519 worktree
3520 .diagnostic_summaries()
3521 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3522 })
3523 }
3524
3525 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3526 if self
3527 .language_server_statuses
3528 .values()
3529 .map(|status| status.pending_diagnostic_updates)
3530 .sum::<isize>()
3531 == 1
3532 {
3533 cx.emit(Event::DiskBasedDiagnosticsStarted);
3534 }
3535 }
3536
3537 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3538 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3539 if self
3540 .language_server_statuses
3541 .values()
3542 .map(|status| status.pending_diagnostic_updates)
3543 .sum::<isize>()
3544 == 0
3545 {
3546 cx.emit(Event::DiskBasedDiagnosticsFinished);
3547 }
3548 }
3549
3550 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3551 self.active_entry
3552 }
3553
3554 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3555 self.worktree_for_id(path.worktree_id, cx)?
3556 .read(cx)
3557 .entry_for_path(&path.path)
3558 .map(|entry| entry.id)
3559 }
3560
3561 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3562 let worktree = self.worktree_for_entry(entry_id, cx)?;
3563 let worktree = worktree.read(cx);
3564 let worktree_id = worktree.id();
3565 let path = worktree.entry_for_id(entry_id)?.path.clone();
3566 Some(ProjectPath { worktree_id, path })
3567 }
3568
3569 // RPC message handlers
3570
3571 async fn handle_unshare_project(
3572 this: ModelHandle<Self>,
3573 _: TypedEnvelope<proto::UnshareProject>,
3574 _: Arc<Client>,
3575 mut cx: AsyncAppContext,
3576 ) -> Result<()> {
3577 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3578 Ok(())
3579 }
3580
3581 async fn handle_add_collaborator(
3582 this: ModelHandle<Self>,
3583 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3584 _: Arc<Client>,
3585 mut cx: AsyncAppContext,
3586 ) -> Result<()> {
3587 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3588 let collaborator = envelope
3589 .payload
3590 .collaborator
3591 .take()
3592 .ok_or_else(|| anyhow!("empty collaborator"))?;
3593
3594 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3595 this.update(&mut cx, |this, cx| {
3596 this.collaborators
3597 .insert(collaborator.peer_id, collaborator);
3598 cx.notify();
3599 });
3600
3601 Ok(())
3602 }
3603
3604 async fn handle_remove_collaborator(
3605 this: ModelHandle<Self>,
3606 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3607 _: Arc<Client>,
3608 mut cx: AsyncAppContext,
3609 ) -> Result<()> {
3610 this.update(&mut cx, |this, cx| {
3611 let peer_id = PeerId(envelope.payload.peer_id);
3612 let replica_id = this
3613 .collaborators
3614 .remove(&peer_id)
3615 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3616 .replica_id;
3617 for (_, buffer) in &this.opened_buffers {
3618 if let Some(buffer) = buffer.upgrade(cx) {
3619 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3620 }
3621 }
3622 cx.emit(Event::CollaboratorLeft(peer_id));
3623 cx.notify();
3624 Ok(())
3625 })
3626 }
3627
3628 async fn handle_register_worktree(
3629 this: ModelHandle<Self>,
3630 envelope: TypedEnvelope<proto::RegisterWorktree>,
3631 client: Arc<Client>,
3632 mut cx: AsyncAppContext,
3633 ) -> Result<()> {
3634 this.update(&mut cx, |this, cx| {
3635 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3636 let replica_id = this.replica_id();
3637 let worktree = proto::Worktree {
3638 id: envelope.payload.worktree_id,
3639 root_name: envelope.payload.root_name,
3640 entries: Default::default(),
3641 diagnostic_summaries: Default::default(),
3642 visible: envelope.payload.visible,
3643 };
3644 let (worktree, load_task) =
3645 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3646 this.add_worktree(&worktree, cx);
3647 load_task.detach();
3648 Ok(())
3649 })
3650 }
3651
3652 async fn handle_unregister_worktree(
3653 this: ModelHandle<Self>,
3654 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3655 _: Arc<Client>,
3656 mut cx: AsyncAppContext,
3657 ) -> Result<()> {
3658 this.update(&mut cx, |this, cx| {
3659 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3660 this.remove_worktree(worktree_id, cx);
3661 Ok(())
3662 })
3663 }
3664
3665 async fn handle_update_worktree(
3666 this: ModelHandle<Self>,
3667 envelope: TypedEnvelope<proto::UpdateWorktree>,
3668 _: Arc<Client>,
3669 mut cx: AsyncAppContext,
3670 ) -> Result<()> {
3671 this.update(&mut cx, |this, cx| {
3672 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3673 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3674 worktree.update(cx, |worktree, _| {
3675 let worktree = worktree.as_remote_mut().unwrap();
3676 worktree.update_from_remote(envelope)
3677 })?;
3678 }
3679 Ok(())
3680 })
3681 }
3682
3683 async fn handle_update_diagnostic_summary(
3684 this: ModelHandle<Self>,
3685 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3686 _: Arc<Client>,
3687 mut cx: AsyncAppContext,
3688 ) -> Result<()> {
3689 this.update(&mut cx, |this, cx| {
3690 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3691 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3692 if let Some(summary) = envelope.payload.summary {
3693 let project_path = ProjectPath {
3694 worktree_id,
3695 path: Path::new(&summary.path).into(),
3696 };
3697 worktree.update(cx, |worktree, _| {
3698 worktree
3699 .as_remote_mut()
3700 .unwrap()
3701 .update_diagnostic_summary(project_path.path.clone(), &summary);
3702 });
3703 cx.emit(Event::DiagnosticsUpdated(project_path));
3704 }
3705 }
3706 Ok(())
3707 })
3708 }
3709
3710 async fn handle_start_language_server(
3711 this: ModelHandle<Self>,
3712 envelope: TypedEnvelope<proto::StartLanguageServer>,
3713 _: Arc<Client>,
3714 mut cx: AsyncAppContext,
3715 ) -> Result<()> {
3716 let server = envelope
3717 .payload
3718 .server
3719 .ok_or_else(|| anyhow!("invalid server"))?;
3720 this.update(&mut cx, |this, cx| {
3721 this.language_server_statuses.insert(
3722 server.id as usize,
3723 LanguageServerStatus {
3724 name: server.name,
3725 pending_work: Default::default(),
3726 pending_diagnostic_updates: 0,
3727 },
3728 );
3729 cx.notify();
3730 });
3731 Ok(())
3732 }
3733
3734 async fn handle_update_language_server(
3735 this: ModelHandle<Self>,
3736 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3737 _: Arc<Client>,
3738 mut cx: AsyncAppContext,
3739 ) -> Result<()> {
3740 let language_server_id = envelope.payload.language_server_id as usize;
3741 match envelope
3742 .payload
3743 .variant
3744 .ok_or_else(|| anyhow!("invalid variant"))?
3745 {
3746 proto::update_language_server::Variant::WorkStart(payload) => {
3747 this.update(&mut cx, |this, cx| {
3748 this.on_lsp_work_start(language_server_id, payload.token, cx);
3749 })
3750 }
3751 proto::update_language_server::Variant::WorkProgress(payload) => {
3752 this.update(&mut cx, |this, cx| {
3753 this.on_lsp_work_progress(
3754 language_server_id,
3755 payload.token,
3756 LanguageServerProgress {
3757 message: payload.message,
3758 percentage: payload.percentage.map(|p| p as usize),
3759 last_update_at: Instant::now(),
3760 },
3761 cx,
3762 );
3763 })
3764 }
3765 proto::update_language_server::Variant::WorkEnd(payload) => {
3766 this.update(&mut cx, |this, cx| {
3767 this.on_lsp_work_end(language_server_id, payload.token, cx);
3768 })
3769 }
3770 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3771 this.update(&mut cx, |this, cx| {
3772 this.disk_based_diagnostics_started(cx);
3773 })
3774 }
3775 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3776 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3777 }
3778 }
3779
3780 Ok(())
3781 }
3782
3783 async fn handle_update_buffer(
3784 this: ModelHandle<Self>,
3785 envelope: TypedEnvelope<proto::UpdateBuffer>,
3786 _: Arc<Client>,
3787 mut cx: AsyncAppContext,
3788 ) -> Result<()> {
3789 this.update(&mut cx, |this, cx| {
3790 let payload = envelope.payload.clone();
3791 let buffer_id = payload.buffer_id;
3792 let ops = payload
3793 .operations
3794 .into_iter()
3795 .map(|op| language::proto::deserialize_operation(op))
3796 .collect::<Result<Vec<_>, _>>()?;
3797 match this.opened_buffers.entry(buffer_id) {
3798 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3799 OpenBuffer::Strong(buffer) => {
3800 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3801 }
3802 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3803 OpenBuffer::Weak(_) => {}
3804 },
3805 hash_map::Entry::Vacant(e) => {
3806 e.insert(OpenBuffer::Loading(ops));
3807 }
3808 }
3809 Ok(())
3810 })
3811 }
3812
3813 async fn handle_update_buffer_file(
3814 this: ModelHandle<Self>,
3815 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3816 _: Arc<Client>,
3817 mut cx: AsyncAppContext,
3818 ) -> Result<()> {
3819 this.update(&mut cx, |this, cx| {
3820 let payload = envelope.payload.clone();
3821 let buffer_id = payload.buffer_id;
3822 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3823 let worktree = this
3824 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3825 .ok_or_else(|| anyhow!("no such worktree"))?;
3826 let file = File::from_proto(file, worktree.clone(), cx)?;
3827 let buffer = this
3828 .opened_buffers
3829 .get_mut(&buffer_id)
3830 .and_then(|b| b.upgrade(cx))
3831 .ok_or_else(|| anyhow!("no such buffer"))?;
3832 buffer.update(cx, |buffer, cx| {
3833 buffer.file_updated(Box::new(file), cx).detach();
3834 });
3835 Ok(())
3836 })
3837 }
3838
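    // Saves a buffer on behalf of a peer, first waiting until the local replica
    // has caught up to the version named in the request so that no edits are lost.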
3839 async fn handle_save_buffer(
3840 this: ModelHandle<Self>,
3841 envelope: TypedEnvelope<proto::SaveBuffer>,
3842 _: Arc<Client>,
3843 mut cx: AsyncAppContext,
3844 ) -> Result<proto::BufferSaved> {
3845 let buffer_id = envelope.payload.buffer_id;
3846 let requested_version = deserialize_version(envelope.payload.version);
3847
3848 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3849 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3850 let buffer = this
3851 .opened_buffers
3852 .get(&buffer_id)
3853 .and_then(|buffer| buffer.upgrade(cx))
3854 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3855 Ok::<_, anyhow::Error>((project_id, buffer))
3856 })?;
3857 buffer
3858 .update(&mut cx, |buffer, _| {
3859 buffer.wait_for_version(requested_version)
3860 })
3861 .await;
3862
3863 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3864 Ok(proto::BufferSaved {
3865 project_id,
3866 buffer_id,
3867 version: serialize_version(&saved_version),
3868 mtime: Some(mtime.into()),
3869 })
3870 }
3871
3872 async fn handle_reload_buffers(
3873 this: ModelHandle<Self>,
3874 envelope: TypedEnvelope<proto::ReloadBuffers>,
3875 _: Arc<Client>,
3876 mut cx: AsyncAppContext,
3877 ) -> Result<proto::ReloadBuffersResponse> {
3878 let sender_id = envelope.original_sender_id()?;
3879 let reload = this.update(&mut cx, |this, cx| {
3880 let mut buffers = HashSet::default();
3881 for buffer_id in &envelope.payload.buffer_ids {
3882 buffers.insert(
3883 this.opened_buffers
3884 .get(buffer_id)
3885 .and_then(|buffer| buffer.upgrade(cx))
3886 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3887 );
3888 }
3889 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3890 })?;
3891
3892 let project_transaction = reload.await?;
3893 let project_transaction = this.update(&mut cx, |this, cx| {
3894 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3895 });
3896 Ok(proto::ReloadBuffersResponse {
3897 transaction: Some(project_transaction),
3898 })
3899 }
3900
3901 async fn handle_format_buffers(
3902 this: ModelHandle<Self>,
3903 envelope: TypedEnvelope<proto::FormatBuffers>,
3904 _: Arc<Client>,
3905 mut cx: AsyncAppContext,
3906 ) -> Result<proto::FormatBuffersResponse> {
3907 let sender_id = envelope.original_sender_id()?;
3908 let format = this.update(&mut cx, |this, cx| {
3909 let mut buffers = HashSet::default();
3910 for buffer_id in &envelope.payload.buffer_ids {
3911 buffers.insert(
3912 this.opened_buffers
3913 .get(buffer_id)
3914 .and_then(|buffer| buffer.upgrade(cx))
3915 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3916 );
3917 }
3918 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3919 })?;
3920
3921 let project_transaction = format.await?;
3922 let project_transaction = this.update(&mut cx, |this, cx| {
3923 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3924 });
3925 Ok(proto::FormatBuffersResponse {
3926 transaction: Some(project_transaction),
3927 })
3928 }
3929
3930 async fn handle_get_completions(
3931 this: ModelHandle<Self>,
3932 envelope: TypedEnvelope<proto::GetCompletions>,
3933 _: Arc<Client>,
3934 mut cx: AsyncAppContext,
3935 ) -> Result<proto::GetCompletionsResponse> {
3936 let position = envelope
3937 .payload
3938 .position
3939 .and_then(language::proto::deserialize_anchor)
3940 .ok_or_else(|| anyhow!("invalid position"))?;
3941 let version = deserialize_version(envelope.payload.version);
3942 let buffer = this.read_with(&cx, |this, cx| {
3943 this.opened_buffers
3944 .get(&envelope.payload.buffer_id)
3945 .and_then(|buffer| buffer.upgrade(cx))
3946 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3947 })?;
3948 buffer
3949 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3950 .await;
3951 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3952 let completions = this
3953 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3954 .await?;
3955
3956 Ok(proto::GetCompletionsResponse {
3957 completions: completions
3958 .iter()
3959 .map(language::proto::serialize_completion)
3960 .collect(),
3961 version: serialize_version(&version),
3962 })
3963 }
3964
3965 async fn handle_apply_additional_edits_for_completion(
3966 this: ModelHandle<Self>,
3967 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3968 _: Arc<Client>,
3969 mut cx: AsyncAppContext,
3970 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3971 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3972 let buffer = this
3973 .opened_buffers
3974 .get(&envelope.payload.buffer_id)
3975 .and_then(|buffer| buffer.upgrade(cx))
3976 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3977 let language = buffer.read(cx).language();
3978 let completion = language::proto::deserialize_completion(
3979 envelope
3980 .payload
3981 .completion
3982 .ok_or_else(|| anyhow!("invalid completion"))?,
3983 language,
3984 )?;
3985 Ok::<_, anyhow::Error>(
3986 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3987 )
3988 })?;
3989
3990 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3991 transaction: apply_additional_edits
3992 .await?
3993 .as_ref()
3994 .map(language::proto::serialize_transaction),
3995 })
3996 }
3997
3998 async fn handle_get_code_actions(
3999 this: ModelHandle<Self>,
4000 envelope: TypedEnvelope<proto::GetCodeActions>,
4001 _: Arc<Client>,
4002 mut cx: AsyncAppContext,
4003 ) -> Result<proto::GetCodeActionsResponse> {
4004 let start = envelope
4005 .payload
4006 .start
4007 .and_then(language::proto::deserialize_anchor)
4008 .ok_or_else(|| anyhow!("invalid start"))?;
4009 let end = envelope
4010 .payload
4011 .end
4012 .and_then(language::proto::deserialize_anchor)
4013 .ok_or_else(|| anyhow!("invalid end"))?;
4014 let buffer = this.update(&mut cx, |this, cx| {
4015 this.opened_buffers
4016 .get(&envelope.payload.buffer_id)
4017 .and_then(|buffer| buffer.upgrade(cx))
4018 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4019 })?;
4020 buffer
4021 .update(&mut cx, |buffer, _| {
4022 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4023 })
4024 .await;
4025
4026 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4027 let code_actions = this.update(&mut cx, |this, cx| {
4028 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4029 })?;
4030
4031 Ok(proto::GetCodeActionsResponse {
4032 actions: code_actions
4033 .await?
4034 .iter()
4035 .map(language::proto::serialize_code_action)
4036 .collect(),
4037 version: serialize_version(&version),
4038 })
4039 }
4040
4041 async fn handle_apply_code_action(
4042 this: ModelHandle<Self>,
4043 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4044 _: Arc<Client>,
4045 mut cx: AsyncAppContext,
4046 ) -> Result<proto::ApplyCodeActionResponse> {
4047 let sender_id = envelope.original_sender_id()?;
4048 let action = language::proto::deserialize_code_action(
4049 envelope
4050 .payload
4051 .action
4052 .ok_or_else(|| anyhow!("invalid action"))?,
4053 )?;
4054 let apply_code_action = this.update(&mut cx, |this, cx| {
4055 let buffer = this
4056 .opened_buffers
4057 .get(&envelope.payload.buffer_id)
4058 .and_then(|buffer| buffer.upgrade(cx))
4059 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4060 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4061 })?;
4062
4063 let project_transaction = apply_code_action.await?;
4064 let project_transaction = this.update(&mut cx, |this, cx| {
4065 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4066 });
4067 Ok(proto::ApplyCodeActionResponse {
4068 transaction: Some(project_transaction),
4069 })
4070 }
4071
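    // Generic handler for LSP-backed requests forwarded by peers: deserialize the
    // request, run it against the local language server via `request_lsp`, and
    // serialize the response relative to the buffer version observed on this side.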
4072 async fn handle_lsp_command<T: LspCommand>(
4073 this: ModelHandle<Self>,
4074 envelope: TypedEnvelope<T::ProtoRequest>,
4075 _: Arc<Client>,
4076 mut cx: AsyncAppContext,
4077 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4078 where
4079 <T::LspRequest as lsp::request::Request>::Result: Send,
4080 {
4081 let sender_id = envelope.original_sender_id()?;
4082 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4083 let buffer_handle = this.read_with(&cx, |this, _| {
4084 this.opened_buffers
4085 .get(&buffer_id)
4086 .and_then(|buffer| buffer.upgrade(&cx))
4087 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4088 })?;
4089 let request = T::from_proto(
4090 envelope.payload,
4091 this.clone(),
4092 buffer_handle.clone(),
4093 cx.clone(),
4094 )
4095 .await?;
4096 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4097 let response = this
4098 .update(&mut cx, |this, cx| {
4099 this.request_lsp(buffer_handle, request, cx)
4100 })
4101 .await?;
4102 this.update(&mut cx, |this, cx| {
4103 Ok(T::response_to_proto(
4104 response,
4105 this,
4106 sender_id,
4107 &buffer_version,
4108 cx,
4109 ))
4110 })
4111 }
4112
4113 async fn handle_get_project_symbols(
4114 this: ModelHandle<Self>,
4115 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<proto::GetProjectSymbolsResponse> {
4119 let symbols = this
4120 .update(&mut cx, |this, cx| {
4121 this.symbols(&envelope.payload.query, cx)
4122 })
4123 .await?;
4124
4125 Ok(proto::GetProjectSymbolsResponse {
4126 symbols: symbols.iter().map(serialize_symbol).collect(),
4127 })
4128 }
4129
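    // Runs a project-wide search on behalf of a peer, replying with the anchor
    // range of every match and serializing each matching buffer for that peer.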
4130 async fn handle_search_project(
4131 this: ModelHandle<Self>,
4132 envelope: TypedEnvelope<proto::SearchProject>,
4133 _: Arc<Client>,
4134 mut cx: AsyncAppContext,
4135 ) -> Result<proto::SearchProjectResponse> {
4136 let peer_id = envelope.original_sender_id()?;
4137 let query = SearchQuery::from_proto(envelope.payload)?;
4138 let result = this
4139 .update(&mut cx, |this, cx| this.search(query, cx))
4140 .await?;
4141
4142 this.update(&mut cx, |this, cx| {
4143 let mut locations = Vec::new();
4144 for (buffer, ranges) in result {
4145 for range in ranges {
4146 let start = serialize_anchor(&range.start);
4147 let end = serialize_anchor(&range.end);
4148 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4149 locations.push(proto::Location {
4150 buffer: Some(buffer),
4151 start: Some(start),
4152 end: Some(end),
4153 });
4154 }
4155 }
4156 Ok(proto::SearchProjectResponse { locations })
4157 })
4158 }
4159
4160 async fn handle_open_buffer_for_symbol(
4161 this: ModelHandle<Self>,
4162 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4163 _: Arc<Client>,
4164 mut cx: AsyncAppContext,
4165 ) -> Result<proto::OpenBufferForSymbolResponse> {
4166 let peer_id = envelope.original_sender_id()?;
4167 let symbol = envelope
4168 .payload
4169 .symbol
4170 .ok_or_else(|| anyhow!("invalid symbol"))?;
4171 let symbol = this.read_with(&cx, |this, _| {
4172 let symbol = this.deserialize_symbol(symbol)?;
4173 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4174 if signature == symbol.signature {
4175 Ok(symbol)
4176 } else {
4177 Err(anyhow!("invalid symbol signature"))
4178 }
4179 })?;
4180 let buffer = this
4181 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4182 .await?;
4183
4184 Ok(proto::OpenBufferForSymbolResponse {
4185 buffer: Some(this.update(&mut cx, |this, cx| {
4186 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4187 })),
4188 })
4189 }
4190
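    // Produces a signature for a project symbol by hashing its worktree id, its
    // path, and a per-project random nonce. Peers echo this signature back when
    // asking to open a buffer for a symbol, letting the host reject requests for
    // paths it never advertised.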
4191 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4192 let mut hasher = Sha256::new();
4193 hasher.update(worktree_id.to_proto().to_be_bytes());
4194 hasher.update(path.to_string_lossy().as_bytes());
4195 hasher.update(self.nonce.to_be_bytes());
4196 hasher.finalize().as_slice().try_into().unwrap()
4197 }
4198
4199 async fn handle_open_buffer_by_id(
4200 this: ModelHandle<Self>,
4201 envelope: TypedEnvelope<proto::OpenBufferById>,
4202 _: Arc<Client>,
4203 mut cx: AsyncAppContext,
4204 ) -> Result<proto::OpenBufferResponse> {
4205 let peer_id = envelope.original_sender_id()?;
4206 let buffer = this
4207 .update(&mut cx, |this, cx| {
4208 this.open_buffer_by_id(envelope.payload.id, cx)
4209 })
4210 .await?;
4211 this.update(&mut cx, |this, cx| {
4212 Ok(proto::OpenBufferResponse {
4213 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4214 })
4215 })
4216 }
4217
4218 async fn handle_open_buffer_by_path(
4219 this: ModelHandle<Self>,
4220 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4221 _: Arc<Client>,
4222 mut cx: AsyncAppContext,
4223 ) -> Result<proto::OpenBufferResponse> {
4224 let peer_id = envelope.original_sender_id()?;
4225 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4226 let open_buffer = this.update(&mut cx, |this, cx| {
4227 this.open_buffer(
4228 ProjectPath {
4229 worktree_id,
4230 path: PathBuf::from(envelope.payload.path).into(),
4231 },
4232 cx,
4233 )
4234 });
4235
4236 let buffer = open_buffer.await?;
4237 this.update(&mut cx, |this, cx| {
4238 Ok(proto::OpenBufferResponse {
4239 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4240 })
4241 })
4242 }
4243
4244 fn serialize_project_transaction_for_peer(
4245 &mut self,
4246 project_transaction: ProjectTransaction,
4247 peer_id: PeerId,
4248 cx: &AppContext,
4249 ) -> proto::ProjectTransaction {
4250 let mut serialized_transaction = proto::ProjectTransaction {
4251 buffers: Default::default(),
4252 transactions: Default::default(),
4253 };
4254 for (buffer, transaction) in project_transaction.0 {
4255 serialized_transaction
4256 .buffers
4257 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4258 serialized_transaction
4259 .transactions
4260 .push(language::proto::serialize_transaction(&transaction));
4261 }
4262 serialized_transaction
4263 }
4264
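    // Reconstructs a `ProjectTransaction` from its wire form, resolving each
    // serialized buffer and waiting until the edits referenced by each transaction
    // have been applied locally, optionally pushing them onto the undo history.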
4265 fn deserialize_project_transaction(
4266 &mut self,
4267 message: proto::ProjectTransaction,
4268 push_to_history: bool,
4269 cx: &mut ModelContext<Self>,
4270 ) -> Task<Result<ProjectTransaction>> {
4271 cx.spawn(|this, mut cx| async move {
4272 let mut project_transaction = ProjectTransaction::default();
4273 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4274 let buffer = this
4275 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4276 .await?;
4277 let transaction = language::proto::deserialize_transaction(transaction)?;
4278 project_transaction.0.insert(buffer, transaction);
4279 }
4280
4281 for (buffer, transaction) in &project_transaction.0 {
4282 buffer
4283 .update(&mut cx, |buffer, _| {
4284 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4285 })
4286 .await;
4287
4288 if push_to_history {
4289 buffer.update(&mut cx, |buffer, _| {
4290 buffer.push_transaction(transaction.clone(), Instant::now());
4291 });
4292 }
4293 }
4294
4295 Ok(project_transaction)
4296 })
4297 }
4298
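    // The first time a buffer is shared with a given peer its full state is sent;
    // afterwards only the buffer id is sent, since the peer already holds a replica.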
4299 fn serialize_buffer_for_peer(
4300 &mut self,
4301 buffer: &ModelHandle<Buffer>,
4302 peer_id: PeerId,
4303 cx: &AppContext,
4304 ) -> proto::Buffer {
4305 let buffer_id = buffer.read(cx).remote_id();
4306 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4307 if shared_buffers.insert(buffer_id) {
4308 proto::Buffer {
4309 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4310 }
4311 } else {
4312 proto::Buffer {
4313 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4314 }
4315 }
4316 }
4317
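    // Resolves a buffer serialized by the host. An `Id` variant refers to a buffer
    // whose state is sent separately, so we wait on the `opened_buffer` watch until
    // it shows up locally; a `State` variant carries the full contents and is
    // instantiated directly.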
4318 fn deserialize_buffer(
4319 &mut self,
4320 buffer: proto::Buffer,
4321 cx: &mut ModelContext<Self>,
4322 ) -> Task<Result<ModelHandle<Buffer>>> {
4323 let replica_id = self.replica_id();
4324
4325 let opened_buffer_tx = self.opened_buffer.0.clone();
4326 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4327 cx.spawn(|this, mut cx| async move {
4328 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4329 proto::buffer::Variant::Id(id) => {
4330 let buffer = loop {
4331 let buffer = this.read_with(&cx, |this, cx| {
4332 this.opened_buffers
4333 .get(&id)
4334 .and_then(|buffer| buffer.upgrade(cx))
4335 });
4336 if let Some(buffer) = buffer {
4337 break buffer;
4338 }
4339 opened_buffer_rx
4340 .next()
4341 .await
4342 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4343 };
4344 Ok(buffer)
4345 }
4346 proto::buffer::Variant::State(mut buffer) => {
4347 let mut buffer_worktree = None;
4348 let mut buffer_file = None;
4349 if let Some(file) = buffer.file.take() {
4350 this.read_with(&cx, |this, cx| {
4351 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4352 let worktree =
4353 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4354 anyhow!("no worktree found for id {}", file.worktree_id)
4355 })?;
4356 buffer_file =
4357 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4358 as Box<dyn language::File>);
4359 buffer_worktree = Some(worktree);
4360 Ok::<_, anyhow::Error>(())
4361 })?;
4362 }
4363
4364 let buffer = cx.add_model(|cx| {
4365 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4366 });
4367
4368 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4369
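                    // Nudge the `opened_buffer` watch so that any other `deserialize_buffer`
                    // calls waiting on a buffer id in the `Id` case above can re-check and resolve.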
4370 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4371 Ok(buffer)
4372 }
4373 }
4374 })
4375 }
4376
4377 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4378 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4379 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4380 let start = serialized_symbol
4381 .start
4382 .ok_or_else(|| anyhow!("invalid start"))?;
4383 let end = serialized_symbol
4384 .end
4385 .ok_or_else(|| anyhow!("invalid end"))?;
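        // The symbol kind is carried over the wire in its raw enum representation,
        // so it is transmuted back here without further validation.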
4386 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4387 let path = PathBuf::from(serialized_symbol.path);
4388 let language = self.languages.select_language(&path);
4389 Ok(Symbol {
4390 source_worktree_id,
4391 worktree_id,
4392 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4393 label: language
4394 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4395 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4396 name: serialized_symbol.name,
4397 path,
4398 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4399 kind,
4400 signature: serialized_symbol
4401 .signature
4402 .try_into()
4403 .map_err(|_| anyhow!("invalid signature"))?,
4404 })
4405 }
4406
4407 async fn handle_buffer_saved(
4408 this: ModelHandle<Self>,
4409 envelope: TypedEnvelope<proto::BufferSaved>,
4410 _: Arc<Client>,
4411 mut cx: AsyncAppContext,
4412 ) -> Result<()> {
4413 let version = deserialize_version(envelope.payload.version);
4414 let mtime = envelope
4415 .payload
4416 .mtime
4417 .ok_or_else(|| anyhow!("missing mtime"))?
4418 .into();
4419
4420 this.update(&mut cx, |this, cx| {
4421 let buffer = this
4422 .opened_buffers
4423 .get(&envelope.payload.buffer_id)
4424 .and_then(|buffer| buffer.upgrade(cx));
4425 if let Some(buffer) = buffer {
4426 buffer.update(cx, |buffer, cx| {
4427 buffer.did_save(version, mtime, None, cx);
4428 });
4429 }
4430 Ok(())
4431 })
4432 }
4433
4434 async fn handle_buffer_reloaded(
4435 this: ModelHandle<Self>,
4436 envelope: TypedEnvelope<proto::BufferReloaded>,
4437 _: Arc<Client>,
4438 mut cx: AsyncAppContext,
4439 ) -> Result<()> {
4440 let payload = envelope.payload.clone();
4441 let version = deserialize_version(payload.version);
4442 let mtime = payload
4443 .mtime
4444 .ok_or_else(|| anyhow!("missing mtime"))?
4445 .into();
4446 this.update(&mut cx, |this, cx| {
4447 let buffer = this
4448 .opened_buffers
4449 .get(&payload.buffer_id)
4450 .and_then(|buffer| buffer.upgrade(cx));
4451 if let Some(buffer) = buffer {
4452 buffer.update(cx, |buffer, cx| {
4453 buffer.did_reload(version, mtime, cx);
4454 });
4455 }
4456 Ok(())
4457 })
4458 }
4459
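    // Fuzzy-matches `query` against the paths of all visible worktrees, with the
    // scoring performed on the background executor.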
4460 pub fn match_paths<'a>(
4461 &self,
4462 query: &'a str,
4463 include_ignored: bool,
4464 smart_case: bool,
4465 max_results: usize,
4466 cancel_flag: &'a AtomicBool,
4467 cx: &AppContext,
4468 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4469 let worktrees = self
4470 .worktrees(cx)
4471 .filter(|worktree| worktree.read(cx).is_visible())
4472 .collect::<Vec<_>>();
4473 let include_root_name = worktrees.len() > 1;
4474 let candidate_sets = worktrees
4475 .into_iter()
4476 .map(|worktree| CandidateSet {
4477 snapshot: worktree.read(cx).snapshot(),
4478 include_ignored,
4479 include_root_name,
4480 })
4481 .collect::<Vec<_>>();
4482
4483 let background = cx.background().clone();
4484 async move {
4485 fuzzy::match_paths(
4486 candidate_sets.as_slice(),
4487 query,
4488 smart_case,
4489 max_results,
4490 cancel_flag,
4491 background,
4492 )
4493 .await
4494 }
4495 }
4496
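    // Converts edits reported by a language server (UTF-16 ranges against a specific
    // document version) into anchor ranges in the current buffer, merging adjacent
    // edits and diffing multi-line replacements so that anchors in unchanged regions
    // keep their positions.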
4497 fn edits_from_lsp(
4498 &mut self,
4499 buffer: &ModelHandle<Buffer>,
4500 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4501 version: Option<i32>,
4502 cx: &mut ModelContext<Self>,
4503 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4504 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4505 cx.background().spawn(async move {
4506 let snapshot = snapshot?;
4507 let mut lsp_edits = lsp_edits
4508 .into_iter()
4509 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4510 .peekable();
4511
4512 let mut edits = Vec::new();
4513 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4514 // Combine any LSP edits that are adjacent.
4515 //
4516 // Also, combine LSP edits that are separated from each other by only
4517 // a newline. This is important because for some code actions,
4518 // Rust-analyzer rewrites the entire buffer via a series of edits that
4519 // are separated by unchanged newline characters.
4520 //
4521 // In order for the diffing logic below to work properly, any edits that
4522 // cancel each other out must be combined into one.
4523 while let Some((next_range, next_text)) = lsp_edits.peek() {
4524 if next_range.start > range.end {
4525 if next_range.start.row > range.end.row + 1
4526 || next_range.start.column > 0
4527 || snapshot.clip_point_utf16(
4528 PointUtf16::new(range.end.row, u32::MAX),
4529 Bias::Left,
4530 ) > range.end
4531 {
4532 break;
4533 }
4534 new_text.push('\n');
4535 }
4536 range.end = next_range.end;
4537 new_text.push_str(&next_text);
4538 lsp_edits.next();
4539 }
4540
4541 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4542 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4543 {
4544 return Err(anyhow!("invalid edits received from language server"));
4545 }
4546
4547 // For multiline edits, perform a diff of the old and new text so that
4548 // we can identify the changes more precisely, preserving the locations
4549 // of any anchors positioned in the unchanged regions.
4550 if range.end.row > range.start.row {
4551 let mut offset = range.start.to_offset(&snapshot);
4552 let old_text = snapshot.text_for_range(range).collect::<String>();
4553
4554 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4555 let mut moved_since_edit = true;
4556 for change in diff.iter_all_changes() {
4557 let tag = change.tag();
4558 let value = change.value();
4559 match tag {
4560 ChangeTag::Equal => {
4561 offset += value.len();
4562 moved_since_edit = true;
4563 }
4564 ChangeTag::Delete => {
4565 let start = snapshot.anchor_after(offset);
4566 let end = snapshot.anchor_before(offset + value.len());
4567 if moved_since_edit {
4568 edits.push((start..end, String::new()));
4569 } else {
4570 edits.last_mut().unwrap().0.end = end;
4571 }
4572 offset += value.len();
4573 moved_since_edit = false;
4574 }
4575 ChangeTag::Insert => {
4576 if moved_since_edit {
4577 let anchor = snapshot.anchor_after(offset);
4578 edits.push((anchor.clone()..anchor, value.to_string()));
4579 } else {
4580 edits.last_mut().unwrap().1.push_str(value);
4581 }
4582 moved_since_edit = false;
4583 }
4584 }
4585 }
4586 } else if range.end == range.start {
4587 let anchor = snapshot.anchor_after(range.start);
4588 edits.push((anchor.clone()..anchor, new_text));
4589 } else {
4590 let edit_start = snapshot.anchor_after(range.start);
4591 let edit_end = snapshot.anchor_before(range.end);
4592 edits.push((edit_start..edit_end, new_text));
4593 }
4594 }
4595
4596 Ok(edits)
4597 })
4598 }
4599
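    // Looks up the snapshot of the buffer that corresponds to the document version a
    // language server responded against, pruning snapshots that are more than
    // `OLD_VERSIONS_TO_RETAIN` versions old along the way.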
4600 fn buffer_snapshot_for_lsp_version(
4601 &mut self,
4602 buffer: &ModelHandle<Buffer>,
4603 version: Option<i32>,
4604 cx: &AppContext,
4605 ) -> Result<TextBufferSnapshot> {
4606 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4607
4608 if let Some(version) = version {
4609 let buffer_id = buffer.read(cx).remote_id();
4610 let snapshots = self
4611 .buffer_snapshots
4612 .get_mut(&buffer_id)
4613 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4614 let mut found_snapshot = None;
4615 snapshots.retain(|(snapshot_version, snapshot)| {
4616 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4617 false
4618 } else {
4619 if *snapshot_version == version {
4620 found_snapshot = Some(snapshot.clone());
4621 }
4622 true
4623 }
4624 });
4625
4626 found_snapshot.ok_or_else(|| {
4627 anyhow!(
4628 "snapshot not found for buffer {} at version {}",
4629 buffer_id,
4630 version
4631 )
4632 })
4633 } else {
            Ok(buffer.read(cx).text_snapshot())
4635 }
4636 }
4637
4638 fn language_server_for_buffer(
4639 &self,
4640 buffer: &Buffer,
4641 cx: &AppContext,
4642 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4643 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4644 let worktree_id = file.worktree_id(cx);
4645 self.language_servers
4646 .get(&(worktree_id, language.lsp_adapter()?.name()))
4647 } else {
4648 None
4649 }
4650 }
4651}
4652
4653impl WorktreeHandle {
4654 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4655 match self {
4656 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4657 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4658 }
4659 }
4660}
4661
4662impl OpenBuffer {
4663 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4664 match self {
4665 OpenBuffer::Strong(handle) => Some(handle.clone()),
4666 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4667 OpenBuffer::Loading(_) => None,
4668 }
4669 }
4670}
4671
4672struct CandidateSet {
4673 snapshot: Snapshot,
4674 include_ignored: bool,
4675 include_root_name: bool,
4676}
4677
4678impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4679 type Candidates = CandidateSetIter<'a>;
4680
4681 fn id(&self) -> usize {
4682 self.snapshot.id().to_usize()
4683 }
4684
4685 fn len(&self) -> usize {
4686 if self.include_ignored {
4687 self.snapshot.file_count()
4688 } else {
4689 self.snapshot.visible_file_count()
4690 }
4691 }
4692
4693 fn prefix(&self) -> Arc<str> {
4694 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4695 self.snapshot.root_name().into()
4696 } else if self.include_root_name {
4697 format!("{}/", self.snapshot.root_name()).into()
4698 } else {
4699 "".into()
4700 }
4701 }
4702
4703 fn candidates(&'a self, start: usize) -> Self::Candidates {
4704 CandidateSetIter {
4705 traversal: self.snapshot.files(self.include_ignored, start),
4706 }
4707 }
4708}
4709
4710struct CandidateSetIter<'a> {
4711 traversal: Traversal<'a>,
4712}
4713
4714impl<'a> Iterator for CandidateSetIter<'a> {
4715 type Item = PathMatchCandidate<'a>;
4716
4717 fn next(&mut self) -> Option<Self::Item> {
4718 self.traversal.next().map(|entry| {
4719 if let EntryKind::File(char_bag) = entry.kind {
4720 PathMatchCandidate {
4721 path: &entry.path,
4722 char_bag,
4723 }
            } else {
                // The `files` traversal only yields file entries, so any other entry kind is unreachable here.
                unreachable!()
            }
4727 })
4728 }
4729}
4730
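// When a project is released, hosts unregister it on the server and guests leave it;
// before the app quits, every language server is asked to shut down.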
4731impl Entity for Project {
4732 type Event = Event;
4733
4734 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4735 match &self.client_state {
4736 ProjectClientState::Local { remote_id_rx, .. } => {
4737 if let Some(project_id) = *remote_id_rx.borrow() {
4738 self.client
4739 .send(proto::UnregisterProject { project_id })
4740 .log_err();
4741 }
4742 }
4743 ProjectClientState::Remote { remote_id, .. } => {
4744 self.client
4745 .send(proto::LeaveProject {
4746 project_id: *remote_id,
4747 })
4748 .log_err();
4749 }
4750 }
4751 }
4752
4753 fn app_will_quit(
4754 &mut self,
4755 _: &mut MutableAppContext,
4756 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4757 let shutdown_futures = self
4758 .language_servers
4759 .drain()
4760 .filter_map(|(_, (_, server))| server.shutdown())
4761 .collect::<Vec<_>>();
4762 Some(
4763 async move {
4764 futures::future::join_all(shutdown_futures).await;
4765 }
4766 .boxed(),
4767 )
4768 }
4769}
4770
4771impl Collaborator {
4772 fn from_proto(
4773 message: proto::Collaborator,
4774 user_store: &ModelHandle<UserStore>,
4775 cx: &mut AsyncAppContext,
4776 ) -> impl Future<Output = Result<Self>> {
4777 let user = user_store.update(cx, |user_store, cx| {
4778 user_store.fetch_user(message.user_id, cx)
4779 });
4780
4781 async move {
4782 Ok(Self {
4783 peer_id: PeerId(message.peer_id),
4784 user: user.await?,
4785 replica_id: message.replica_id as ReplicaId,
4786 })
4787 }
4788 }
4789}
4790
4791impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4792 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4793 Self {
4794 worktree_id,
4795 path: path.as_ref().into(),
4796 }
4797 }
4798}
4799
4800impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4801 fn from(options: lsp::CreateFileOptions) -> Self {
4802 Self {
4803 overwrite: options.overwrite.unwrap_or(false),
4804 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4805 }
4806 }
4807}
4808
4809impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4810 fn from(options: lsp::RenameFileOptions) -> Self {
4811 Self {
4812 overwrite: options.overwrite.unwrap_or(false),
4813 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4814 }
4815 }
4816}
4817
4818impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4819 fn from(options: lsp::DeleteFileOptions) -> Self {
4820 Self {
4821 recursive: options.recursive.unwrap_or(false),
4822 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4823 }
4824 }
4825}
4826
4827fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4828 proto::Symbol {
4829 source_worktree_id: symbol.source_worktree_id.to_proto(),
4830 worktree_id: symbol.worktree_id.to_proto(),
4831 language_server_name: symbol.language_server_name.0.to_string(),
4832 name: symbol.name.clone(),
4833 kind: unsafe { mem::transmute(symbol.kind) },
4834 path: symbol.path.to_string_lossy().to_string(),
4835 start: Some(proto::Point {
4836 row: symbol.range.start.row,
4837 column: symbol.range.start.column,
4838 }),
4839 end: Some(proto::Point {
4840 row: symbol.range.end.row,
4841 column: symbol.range.end.column,
4842 }),
4843 signature: symbol.signature.to_vec(),
4844 }
4845}
4846
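// Computes `path` relative to `base`, walking the two component lists in lockstep and
// emitting `..` for each remaining component of `base` once the paths diverge.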
4847fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4848 let mut path_components = path.components();
4849 let mut base_components = base.components();
4850 let mut components: Vec<Component> = Vec::new();
4851 loop {
4852 match (path_components.next(), base_components.next()) {
4853 (None, None) => break,
4854 (Some(a), None) => {
4855 components.push(a);
4856 components.extend(path_components.by_ref());
4857 break;
4858 }
4859 (None, _) => components.push(Component::ParentDir),
4860 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4861 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4862 (Some(a), Some(_)) => {
4863 components.push(Component::ParentDir);
4864 for _ in base_components {
4865 components.push(Component::ParentDir);
4866 }
4867 components.push(a);
4868 components.extend(path_components.by_ref());
4869 break;
4870 }
4871 }
4872 }
4873 components.iter().map(|c| c.as_os_str()).collect()
4874}
4875
4876impl Item for Buffer {
4877 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4878 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4879 }
4880}
4881
4882#[cfg(test)]
4883mod tests {
4884 use super::{Event, *};
4885 use fs::RealFs;
4886 use futures::{future, StreamExt};
4887 use gpui::test::subscribe;
4888 use language::{
4889 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4890 ToPoint,
4891 };
4892 use lsp::Url;
4893 use serde_json::json;
4894 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4895 use unindent::Unindent as _;
4896 use util::{assert_set_eq, test::temp_tree};
4897 use worktree::WorktreeHandle as _;
4898
4899 #[gpui::test]
4900 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4901 let dir = temp_tree(json!({
4902 "root": {
4903 "apple": "",
4904 "banana": {
4905 "carrot": {
4906 "date": "",
4907 "endive": "",
4908 }
4909 },
4910 "fennel": {
4911 "grape": "",
4912 }
4913 }
4914 }));
4915
4916 let root_link_path = dir.path().join("root_link");
4917 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4918 unix::fs::symlink(
4919 &dir.path().join("root/fennel"),
4920 &dir.path().join("root/finnochio"),
4921 )
4922 .unwrap();
4923
4924 let project = Project::test(Arc::new(RealFs), cx);
4925
4926 let (tree, _) = project
4927 .update(cx, |project, cx| {
4928 project.find_or_create_local_worktree(&root_link_path, true, cx)
4929 })
4930 .await
4931 .unwrap();
4932
4933 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4934 .await;
4935 cx.read(|cx| {
4936 let tree = tree.read(cx);
4937 assert_eq!(tree.file_count(), 5);
4938 assert_eq!(
4939 tree.inode_for_path("fennel/grape"),
4940 tree.inode_for_path("finnochio/grape")
4941 );
4942 });
4943
4944 let cancel_flag = Default::default();
4945 let results = project
4946 .read_with(cx, |project, cx| {
4947 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4948 })
4949 .await;
4950 assert_eq!(
4951 results
4952 .into_iter()
4953 .map(|result| result.path)
4954 .collect::<Vec<Arc<Path>>>(),
4955 vec![
4956 PathBuf::from("banana/carrot/date").into(),
4957 PathBuf::from("banana/carrot/endive").into(),
4958 ]
4959 );
4960 }
4961
4962 #[gpui::test]
4963 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4964 cx.foreground().forbid_parking();
4965
4966 let mut rust_language = Language::new(
4967 LanguageConfig {
4968 name: "Rust".into(),
4969 path_suffixes: vec!["rs".to_string()],
4970 ..Default::default()
4971 },
4972 Some(tree_sitter_rust::language()),
4973 );
4974 let mut json_language = Language::new(
4975 LanguageConfig {
4976 name: "JSON".into(),
4977 path_suffixes: vec!["json".to_string()],
4978 ..Default::default()
4979 },
4980 None,
4981 );
4982 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4983 name: "the-rust-language-server",
4984 capabilities: lsp::ServerCapabilities {
4985 completion_provider: Some(lsp::CompletionOptions {
4986 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4987 ..Default::default()
4988 }),
4989 ..Default::default()
4990 },
4991 ..Default::default()
4992 });
4993 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4994 name: "the-json-language-server",
4995 capabilities: lsp::ServerCapabilities {
4996 completion_provider: Some(lsp::CompletionOptions {
4997 trigger_characters: Some(vec![":".to_string()]),
4998 ..Default::default()
4999 }),
5000 ..Default::default()
5001 },
5002 ..Default::default()
5003 });
5004
5005 let fs = FakeFs::new(cx.background());
5006 fs.insert_tree(
5007 "/the-root",
5008 json!({
5009 "test.rs": "const A: i32 = 1;",
5010 "test2.rs": "",
5011 "Cargo.toml": "a = 1",
5012 "package.json": "{\"a\": 1}",
5013 }),
5014 )
5015 .await;
5016
5017 let project = Project::test(fs.clone(), cx);
5018 project.update(cx, |project, _| {
5019 project.languages.add(Arc::new(rust_language));
5020 project.languages.add(Arc::new(json_language));
5021 });
5022
5023 let worktree_id = project
5024 .update(cx, |project, cx| {
5025 project.find_or_create_local_worktree("/the-root", true, cx)
5026 })
5027 .await
5028 .unwrap()
5029 .0
5030 .read_with(cx, |tree, _| tree.id());
5031
5032 // Open a buffer without an associated language server.
5033 let toml_buffer = project
5034 .update(cx, |project, cx| {
5035 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5036 })
5037 .await
5038 .unwrap();
5039
5040 // Open a buffer with an associated language server.
5041 let rust_buffer = project
5042 .update(cx, |project, cx| {
5043 project.open_buffer((worktree_id, "test.rs"), cx)
5044 })
5045 .await
5046 .unwrap();
5047
5048 // A server is started up, and it is notified about Rust files.
5049 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5050 assert_eq!(
5051 fake_rust_server
5052 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5053 .await
5054 .text_document,
5055 lsp::TextDocumentItem {
5056 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5057 version: 0,
5058 text: "const A: i32 = 1;".to_string(),
5059 language_id: Default::default()
5060 }
5061 );
5062
5063 // The buffer is configured based on the language server's capabilities.
5064 rust_buffer.read_with(cx, |buffer, _| {
5065 assert_eq!(
5066 buffer.completion_triggers(),
5067 &[".".to_string(), "::".to_string()]
5068 );
5069 });
5070 toml_buffer.read_with(cx, |buffer, _| {
5071 assert!(buffer.completion_triggers().is_empty());
5072 });
5073
5074 // Edit a buffer. The changes are reported to the language server.
5075 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5076 assert_eq!(
5077 fake_rust_server
5078 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5079 .await
5080 .text_document,
5081 lsp::VersionedTextDocumentIdentifier::new(
5082 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5083 1
5084 )
5085 );
5086
5087 // Open a third buffer with a different associated language server.
5088 let json_buffer = project
5089 .update(cx, |project, cx| {
5090 project.open_buffer((worktree_id, "package.json"), cx)
5091 })
5092 .await
5093 .unwrap();
5094
        // A JSON language server is started up and is notified only about the JSON buffer.
5096 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5097 assert_eq!(
5098 fake_json_server
5099 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5100 .await
5101 .text_document,
5102 lsp::TextDocumentItem {
5103 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5104 version: 0,
5105 text: "{\"a\": 1}".to_string(),
5106 language_id: Default::default()
5107 }
5108 );
5109
5110 // This buffer is configured based on the second language server's
5111 // capabilities.
5112 json_buffer.read_with(cx, |buffer, _| {
5113 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5114 });
5115
5116 // When opening another buffer whose language server is already running,
5117 // it is also configured based on the existing language server's capabilities.
5118 let rust_buffer2 = project
5119 .update(cx, |project, cx| {
5120 project.open_buffer((worktree_id, "test2.rs"), cx)
5121 })
5122 .await
5123 .unwrap();
5124 rust_buffer2.read_with(cx, |buffer, _| {
5125 assert_eq!(
5126 buffer.completion_triggers(),
5127 &[".".to_string(), "::".to_string()]
5128 );
5129 });
5130
5131 // Changes are reported only to servers matching the buffer's language.
5132 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5133 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5134 assert_eq!(
5135 fake_rust_server
5136 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5137 .await
5138 .text_document,
5139 lsp::VersionedTextDocumentIdentifier::new(
5140 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5141 1
5142 )
5143 );
5144
5145 // Save notifications are reported to all servers.
5146 toml_buffer
5147 .update(cx, |buffer, cx| buffer.save(cx))
5148 .await
5149 .unwrap();
5150 assert_eq!(
5151 fake_rust_server
5152 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5153 .await
5154 .text_document,
5155 lsp::TextDocumentIdentifier::new(
5156 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5157 )
5158 );
5159 assert_eq!(
5160 fake_json_server
5161 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5162 .await
5163 .text_document,
5164 lsp::TextDocumentIdentifier::new(
5165 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5166 )
5167 );
5168
5169 // Renames are reported only to servers matching the buffer's language.
5170 fs.rename(
5171 Path::new("/the-root/test2.rs"),
5172 Path::new("/the-root/test3.rs"),
5173 Default::default(),
5174 )
5175 .await
5176 .unwrap();
5177 assert_eq!(
5178 fake_rust_server
5179 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5180 .await
5181 .text_document,
5182 lsp::TextDocumentIdentifier::new(
5183 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5184 ),
5185 );
5186 assert_eq!(
5187 fake_rust_server
5188 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5189 .await
5190 .text_document,
5191 lsp::TextDocumentItem {
5192 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5193 version: 0,
5194 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5195 language_id: Default::default()
5196 },
5197 );
5198
5199 rust_buffer2.update(cx, |buffer, cx| {
5200 buffer.update_diagnostics(
5201 DiagnosticSet::from_sorted_entries(
5202 vec![DiagnosticEntry {
5203 diagnostic: Default::default(),
5204 range: Anchor::MIN..Anchor::MAX,
5205 }],
5206 &buffer.snapshot(),
5207 ),
5208 cx,
5209 );
5210 assert_eq!(
5211 buffer
5212 .snapshot()
5213 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5214 .count(),
5215 1
5216 );
5217 });
5218
5219 // When the rename changes the extension of the file, the buffer gets closed on the old
5220 // language server and gets opened on the new one.
5221 fs.rename(
5222 Path::new("/the-root/test3.rs"),
5223 Path::new("/the-root/test3.json"),
5224 Default::default(),
5225 )
5226 .await
5227 .unwrap();
5228 assert_eq!(
5229 fake_rust_server
5230 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5231 .await
5232 .text_document,
5233 lsp::TextDocumentIdentifier::new(
5234 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5235 ),
5236 );
5237 assert_eq!(
5238 fake_json_server
5239 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5240 .await
5241 .text_document,
5242 lsp::TextDocumentItem {
5243 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5244 version: 0,
5245 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5246 language_id: Default::default()
5247 },
5248 );
5249 // We clear the diagnostics, since the language has changed.
5250 rust_buffer2.read_with(cx, |buffer, _| {
5251 assert_eq!(
5252 buffer
5253 .snapshot()
5254 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5255 .count(),
5256 0
5257 );
5258 });
5259
5260 // The renamed file's version resets after changing language server.
5261 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5262 assert_eq!(
5263 fake_json_server
5264 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5265 .await
5266 .text_document,
5267 lsp::VersionedTextDocumentIdentifier::new(
5268 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5269 1
5270 )
5271 );
5272
5273 // Restart language servers
5274 project.update(cx, |project, cx| {
5275 project.restart_language_servers_for_buffers(
5276 vec![rust_buffer.clone(), json_buffer.clone()],
5277 cx,
5278 );
5279 });
5280
5281 let mut rust_shutdown_requests = fake_rust_server
5282 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5283 let mut json_shutdown_requests = fake_json_server
5284 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5285 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5286
5287 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5288 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5289
        // Ensure the Rust document is reopened in the new Rust language server.
5291 assert_eq!(
5292 fake_rust_server
5293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5294 .await
5295 .text_document,
5296 lsp::TextDocumentItem {
5297 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5298 version: 1,
5299 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5300 language_id: Default::default()
5301 }
5302 );
5303
        // Ensure both JSON documents are reopened in the new JSON language server.
5305 assert_set_eq!(
5306 [
5307 fake_json_server
5308 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5309 .await
5310 .text_document,
5311 fake_json_server
5312 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5313 .await
5314 .text_document,
5315 ],
5316 [
5317 lsp::TextDocumentItem {
5318 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5319 version: 0,
5320 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5321 language_id: Default::default()
5322 },
5323 lsp::TextDocumentItem {
5324 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5325 version: 1,
5326 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5327 language_id: Default::default()
5328 }
5329 ]
5330 );
5331
5332 // Close notifications are reported only to servers matching the buffer's language.
5333 cx.update(|_| drop(json_buffer));
5334 let close_message = lsp::DidCloseTextDocumentParams {
5335 text_document: lsp::TextDocumentIdentifier::new(
5336 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5337 ),
5338 };
5339 assert_eq!(
5340 fake_json_server
5341 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5342 .await,
5343 close_message,
5344 );
5345 }
5346
5347 #[gpui::test]
5348 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5349 cx.foreground().forbid_parking();
5350
5351 let progress_token = "the-progress-token";
5352 let mut language = Language::new(
5353 LanguageConfig {
5354 name: "Rust".into(),
5355 path_suffixes: vec!["rs".to_string()],
5356 ..Default::default()
5357 },
5358 Some(tree_sitter_rust::language()),
5359 );
5360 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5361 disk_based_diagnostics_progress_token: Some(progress_token),
5362 disk_based_diagnostics_sources: &["disk"],
5363 ..Default::default()
5364 });
5365
5366 let fs = FakeFs::new(cx.background());
5367 fs.insert_tree(
5368 "/dir",
5369 json!({
5370 "a.rs": "fn a() { A }",
5371 "b.rs": "const y: i32 = 1",
5372 }),
5373 )
5374 .await;
5375
5376 let project = Project::test(fs, cx);
5377 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5378
5379 let (tree, _) = project
5380 .update(cx, |project, cx| {
5381 project.find_or_create_local_worktree("/dir", true, cx)
5382 })
5383 .await
5384 .unwrap();
5385 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5386
5387 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5388 .await;
5389
        // Cause the worktree to start the fake language server.
5391 let _buffer = project
5392 .update(cx, |project, cx| {
5393 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5394 })
5395 .await
5396 .unwrap();
5397
5398 let mut events = subscribe(&project, cx);
5399
5400 let mut fake_server = fake_servers.next().await.unwrap();
5401 fake_server.start_progress(progress_token).await;
5402 assert_eq!(
5403 events.next().await.unwrap(),
5404 Event::DiskBasedDiagnosticsStarted
5405 );
5406
5407 fake_server.start_progress(progress_token).await;
5408 fake_server.end_progress(progress_token).await;
5409 fake_server.start_progress(progress_token).await;
5410
5411 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5412 lsp::PublishDiagnosticsParams {
5413 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5414 version: None,
5415 diagnostics: vec![lsp::Diagnostic {
5416 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5417 severity: Some(lsp::DiagnosticSeverity::ERROR),
5418 message: "undefined variable 'A'".to_string(),
5419 ..Default::default()
5420 }],
5421 },
5422 );
5423 assert_eq!(
5424 events.next().await.unwrap(),
5425 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5426 );
5427
5428 fake_server.end_progress(progress_token).await;
5429 fake_server.end_progress(progress_token).await;
5430 assert_eq!(
5431 events.next().await.unwrap(),
5432 Event::DiskBasedDiagnosticsUpdated
5433 );
5434 assert_eq!(
5435 events.next().await.unwrap(),
5436 Event::DiskBasedDiagnosticsFinished
5437 );
5438
5439 let buffer = project
5440 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5441 .await
5442 .unwrap();
5443
5444 buffer.read_with(cx, |buffer, _| {
5445 let snapshot = buffer.snapshot();
5446 let diagnostics = snapshot
5447 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5448 .collect::<Vec<_>>();
5449 assert_eq!(
5450 diagnostics,
5451 &[DiagnosticEntry {
5452 range: Point::new(0, 9)..Point::new(0, 10),
5453 diagnostic: Diagnostic {
5454 severity: lsp::DiagnosticSeverity::ERROR,
5455 message: "undefined variable 'A'".to_string(),
5456 group_id: 0,
5457 is_primary: true,
5458 ..Default::default()
5459 }
5460 }]
5461 )
5462 });
5463 }
5464
5465 #[gpui::test]
5466 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5467 cx.foreground().forbid_parking();
5468
5469 let progress_token = "the-progress-token";
5470 let mut language = Language::new(
5471 LanguageConfig {
5472 path_suffixes: vec!["rs".to_string()],
5473 ..Default::default()
5474 },
5475 None,
5476 );
5477 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5478 disk_based_diagnostics_sources: &["disk"],
5479 disk_based_diagnostics_progress_token: Some(progress_token),
5480 ..Default::default()
5481 });
5482
5483 let fs = FakeFs::new(cx.background());
5484 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5485
5486 let project = Project::test(fs, cx);
5487 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5488
5489 let worktree_id = project
5490 .update(cx, |project, cx| {
5491 project.find_or_create_local_worktree("/dir", true, cx)
5492 })
5493 .await
5494 .unwrap()
5495 .0
5496 .read_with(cx, |tree, _| tree.id());
5497
5498 let buffer = project
5499 .update(cx, |project, cx| {
5500 project.open_buffer((worktree_id, "a.rs"), cx)
5501 })
5502 .await
5503 .unwrap();
5504
5505 // Simulate diagnostics starting to update.
5506 let mut fake_server = fake_servers.next().await.unwrap();
5507 fake_server.start_progress(progress_token).await;
5508
5509 // Restart the server before the diagnostics finish updating.
5510 project.update(cx, |project, cx| {
5511 project.restart_language_servers_for_buffers([buffer], cx);
5512 });
5513 let mut events = subscribe(&project, cx);
5514
5515 // Simulate the newly started server sending more diagnostics.
5516 let mut fake_server = fake_servers.next().await.unwrap();
5517 fake_server.start_progress(progress_token).await;
5518 assert_eq!(
5519 events.next().await.unwrap(),
5520 Event::DiskBasedDiagnosticsStarted
5521 );
5522
5523 // All diagnostics are considered done, despite the old server's diagnostic
5524 // task never completing.
5525 fake_server.end_progress(progress_token).await;
5526 assert_eq!(
5527 events.next().await.unwrap(),
5528 Event::DiskBasedDiagnosticsUpdated
5529 );
5530 assert_eq!(
5531 events.next().await.unwrap(),
5532 Event::DiskBasedDiagnosticsFinished
5533 );
5534 project.read_with(cx, |project, _| {
5535 assert!(!project.is_running_disk_based_diagnostics());
5536 });
5537 }
5538
5539 #[gpui::test]
5540 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5541 cx.foreground().forbid_parking();
5542
5543 let mut language = Language::new(
5544 LanguageConfig {
5545 name: "Rust".into(),
5546 path_suffixes: vec!["rs".to_string()],
5547 ..Default::default()
5548 },
5549 Some(tree_sitter_rust::language()),
5550 );
5551 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5552 disk_based_diagnostics_sources: &["disk"],
5553 ..Default::default()
5554 });
5555
5556 let text = "
5557 fn a() { A }
5558 fn b() { BB }
5559 fn c() { CCC }
5560 "
5561 .unindent();
5562
5563 let fs = FakeFs::new(cx.background());
5564 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5565
5566 let project = Project::test(fs, cx);
5567 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5568
5569 let worktree_id = project
5570 .update(cx, |project, cx| {
5571 project.find_or_create_local_worktree("/dir", true, cx)
5572 })
5573 .await
5574 .unwrap()
5575 .0
5576 .read_with(cx, |tree, _| tree.id());
5577
5578 let buffer = project
5579 .update(cx, |project, cx| {
5580 project.open_buffer((worktree_id, "a.rs"), cx)
5581 })
5582 .await
5583 .unwrap();
5584
5585 let mut fake_server = fake_servers.next().await.unwrap();
5586 let open_notification = fake_server
5587 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5588 .await;
5589
5590 // Edit the buffer, moving the content down
5591 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5592 let change_notification_1 = fake_server
5593 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5594 .await;
5595 assert!(
5596 change_notification_1.text_document.version > open_notification.text_document.version
5597 );
5598
5599 // Report some diagnostics for the initial version of the buffer
5600 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5601 lsp::PublishDiagnosticsParams {
5602 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5603 version: Some(open_notification.text_document.version),
5604 diagnostics: vec![
5605 lsp::Diagnostic {
5606 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5607 severity: Some(DiagnosticSeverity::ERROR),
5608 message: "undefined variable 'A'".to_string(),
5609 source: Some("disk".to_string()),
5610 ..Default::default()
5611 },
5612 lsp::Diagnostic {
5613 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5614 severity: Some(DiagnosticSeverity::ERROR),
5615 message: "undefined variable 'BB'".to_string(),
5616 source: Some("disk".to_string()),
5617 ..Default::default()
5618 },
5619 lsp::Diagnostic {
5620 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5621 severity: Some(DiagnosticSeverity::ERROR),
5622 source: Some("disk".to_string()),
5623 message: "undefined variable 'CCC'".to_string(),
5624 ..Default::default()
5625 },
5626 ],
5627 },
5628 );
5629
5630 // The diagnostics have moved down since they were created.
5631 buffer.next_notification(cx).await;
5632 buffer.read_with(cx, |buffer, _| {
5633 assert_eq!(
5634 buffer
5635 .snapshot()
5636 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5637 .collect::<Vec<_>>(),
5638 &[
5639 DiagnosticEntry {
5640 range: Point::new(3, 9)..Point::new(3, 11),
5641 diagnostic: Diagnostic {
5642 severity: DiagnosticSeverity::ERROR,
5643 message: "undefined variable 'BB'".to_string(),
5644 is_disk_based: true,
5645 group_id: 1,
5646 is_primary: true,
5647 ..Default::default()
5648 },
5649 },
5650 DiagnosticEntry {
5651 range: Point::new(4, 9)..Point::new(4, 12),
5652 diagnostic: Diagnostic {
5653 severity: DiagnosticSeverity::ERROR,
5654 message: "undefined variable 'CCC'".to_string(),
5655 is_disk_based: true,
5656 group_id: 2,
5657 is_primary: true,
5658 ..Default::default()
5659 }
5660 }
5661 ]
5662 );
5663 assert_eq!(
5664 chunks_with_diagnostics(buffer, 0..buffer.len()),
5665 [
5666 ("\n\nfn a() { ".to_string(), None),
5667 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5668 (" }\nfn b() { ".to_string(), None),
5669 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5670 (" }\nfn c() { ".to_string(), None),
5671 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5672 (" }\n".to_string(), None),
5673 ]
5674 );
5675 assert_eq!(
5676 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5677 [
5678 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5679 (" }\nfn c() { ".to_string(), None),
5680 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5681 ]
5682 );
5683 });
5684
5685 // Ensure overlapping diagnostics are highlighted correctly.
5686 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5687 lsp::PublishDiagnosticsParams {
5688 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5689 version: Some(open_notification.text_document.version),
5690 diagnostics: vec![
5691 lsp::Diagnostic {
5692 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5693 severity: Some(DiagnosticSeverity::ERROR),
5694 message: "undefined variable 'A'".to_string(),
5695 source: Some("disk".to_string()),
5696 ..Default::default()
5697 },
5698 lsp::Diagnostic {
5699 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5700 severity: Some(DiagnosticSeverity::WARNING),
5701 message: "unreachable statement".to_string(),
5702 source: Some("disk".to_string()),
5703 ..Default::default()
5704 },
5705 ],
5706 },
5707 );
5708
5709 buffer.next_notification(cx).await;
5710 buffer.read_with(cx, |buffer, _| {
5711 assert_eq!(
5712 buffer
5713 .snapshot()
5714 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5715 .collect::<Vec<_>>(),
5716 &[
5717 DiagnosticEntry {
5718 range: Point::new(2, 9)..Point::new(2, 12),
5719 diagnostic: Diagnostic {
5720 severity: DiagnosticSeverity::WARNING,
5721 message: "unreachable statement".to_string(),
5722 is_disk_based: true,
5723 group_id: 1,
5724 is_primary: true,
5725 ..Default::default()
5726 }
5727 },
5728 DiagnosticEntry {
5729 range: Point::new(2, 9)..Point::new(2, 10),
5730 diagnostic: Diagnostic {
5731 severity: DiagnosticSeverity::ERROR,
5732 message: "undefined variable 'A'".to_string(),
5733 is_disk_based: true,
5734 group_id: 0,
5735 is_primary: true,
5736 ..Default::default()
5737 },
5738 }
5739 ]
5740 );
5741 assert_eq!(
5742 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5743 [
5744 ("fn a() { ".to_string(), None),
5745 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5746 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5747 ("\n".to_string(), None),
5748 ]
5749 );
5750 assert_eq!(
5751 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5752 [
5753 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5754 ("\n".to_string(), None),
5755 ]
5756 );
5757 });
5758
5759 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5760 // changes since the last save.
5761 buffer.update(cx, |buffer, cx| {
5762 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5763 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5764 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5765 });
5766 let change_notification_2 = fake_server
5767 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5768 .await;
5769 assert!(
5770 change_notification_2.text_document.version
5771 > change_notification_1.text_document.version
5772 );
5773
5774 // Handle out-of-order diagnostics
5775 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5776 lsp::PublishDiagnosticsParams {
5777 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5778 version: Some(change_notification_2.text_document.version),
5779 diagnostics: vec![
5780 lsp::Diagnostic {
5781 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5782 severity: Some(DiagnosticSeverity::ERROR),
5783 message: "undefined variable 'BB'".to_string(),
5784 source: Some("disk".to_string()),
5785 ..Default::default()
5786 },
5787 lsp::Diagnostic {
5788 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5789 severity: Some(DiagnosticSeverity::WARNING),
5790 message: "undefined variable 'A'".to_string(),
5791 source: Some("disk".to_string()),
5792 ..Default::default()
5793 },
5794 ],
5795 },
5796 );
5797
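// Even though the server listed these diagnostics out of positional order, they are
// returned sorted by their positions in the buffer.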
5798 buffer.next_notification(cx).await;
5799 buffer.read_with(cx, |buffer, _| {
5800 assert_eq!(
5801 buffer
5802 .snapshot()
5803 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5804 .collect::<Vec<_>>(),
5805 &[
5806 DiagnosticEntry {
5807 range: Point::new(2, 21)..Point::new(2, 22),
5808 diagnostic: Diagnostic {
5809 severity: DiagnosticSeverity::WARNING,
5810 message: "undefined variable 'A'".to_string(),
5811 is_disk_based: true,
5812 group_id: 1,
5813 is_primary: true,
5814 ..Default::default()
5815 }
5816 },
5817 DiagnosticEntry {
5818 range: Point::new(3, 9)..Point::new(3, 14),
5819 diagnostic: Diagnostic {
5820 severity: DiagnosticSeverity::ERROR,
5821 message: "undefined variable 'BB'".to_string(),
5822 is_disk_based: true,
5823 group_id: 0,
5824 is_primary: true,
5825 ..Default::default()
5826 },
5827 }
5828 ]
5829 );
5830 });
5831 }
5832
5833 #[gpui::test]
5834 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5835 cx.foreground().forbid_parking();
5836
5837 let text = concat!(
5838 "let one = ;\n", //
5839 "let two = \n",
5840 "let three = 3;\n",
5841 );
5842
5843 let fs = FakeFs::new(cx.background());
5844 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5845
5846 let project = Project::test(fs, cx);
5847 let worktree_id = project
5848 .update(cx, |project, cx| {
5849 project.find_or_create_local_worktree("/dir", true, cx)
5850 })
5851 .await
5852 .unwrap()
5853 .0
5854 .read_with(cx, |tree, _| tree.id());
5855
5856 let buffer = project
5857 .update(cx, |project, cx| {
5858 project.open_buffer((worktree_id, "a.rs"), cx)
5859 })
5860 .await
5861 .unwrap();
5862
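// Manually insert two diagnostics with empty (zero-width) ranges, without going
// through a language server.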
5863 project.update(cx, |project, cx| {
5864 project
5865 .update_buffer_diagnostics(
5866 &buffer,
5867 vec![
5868 DiagnosticEntry {
5869 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5870 diagnostic: Diagnostic {
5871 severity: DiagnosticSeverity::ERROR,
5872 message: "syntax error 1".to_string(),
5873 ..Default::default()
5874 },
5875 },
5876 DiagnosticEntry {
5877 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5878 diagnostic: Diagnostic {
5879 severity: DiagnosticSeverity::ERROR,
5880 message: "syntax error 2".to_string(),
5881 ..Default::default()
5882 },
5883 },
5884 ],
5885 None,
5886 cx,
5887 )
5888 .unwrap();
5889 });
5890
5891 // An empty range is extended forward to include the following character.
5892 // At the end of a line, an empty range is extended backward to include
5893 // the preceding character.
5894 buffer.read_with(cx, |buffer, _| {
5895 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5896 assert_eq!(
5897 chunks
5898 .iter()
5899 .map(|(s, d)| (s.as_str(), *d))
5900 .collect::<Vec<_>>(),
5901 &[
5902 ("let one = ", None),
5903 (";", Some(DiagnosticSeverity::ERROR)),
5904 ("\nlet two =", None),
5905 (" ", Some(DiagnosticSeverity::ERROR)),
5906 ("\nlet three = 3;\n", None)
5907 ]
5908 );
5909 });
5910 }
5911
5912 #[gpui::test]
5913 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5914 cx.foreground().forbid_parking();
5915
5916 let mut language = Language::new(
5917 LanguageConfig {
5918 name: "Rust".into(),
5919 path_suffixes: vec!["rs".to_string()],
5920 ..Default::default()
5921 },
5922 Some(tree_sitter_rust::language()),
5923 );
5924 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5925
5926 let text = "
5927 fn a() {
5928 f1();
5929 }
5930 fn b() {
5931 f2();
5932 }
5933 fn c() {
5934 f3();
5935 }
5936 "
5937 .unindent();
5938
5939 let fs = FakeFs::new(cx.background());
5940 fs.insert_tree(
5941 "/dir",
5942 json!({
5943 "a.rs": text.clone(),
5944 }),
5945 )
5946 .await;
5947
5948 let project = Project::test(fs, cx);
5949 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5950
5951 let worktree_id = project
5952 .update(cx, |project, cx| {
5953 project.find_or_create_local_worktree("/dir", true, cx)
5954 })
5955 .await
5956 .unwrap()
5957 .0
5958 .read_with(cx, |tree, _| tree.id());
5959
5960 let buffer = project
5961 .update(cx, |project, cx| {
5962 project.open_buffer((worktree_id, "a.rs"), cx)
5963 })
5964 .await
5965 .unwrap();
5966
5967 let mut fake_server = fake_servers.next().await.unwrap();
5968 let lsp_document_version = fake_server
5969 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5970 .await
5971 .text_document
5972 .version;
5973
5974 // Simulate editing the buffer after the language server computes some edits.
5975 buffer.update(cx, |buffer, cx| {
5976 buffer.edit(
5977 [Point::new(0, 0)..Point::new(0, 0)],
5978 "// above first function\n",
5979 cx,
5980 );
5981 buffer.edit(
5982 [Point::new(2, 0)..Point::new(2, 0)],
5983 " // inside first function\n",
5984 cx,
5985 );
5986 buffer.edit(
5987 [Point::new(6, 4)..Point::new(6, 4)],
5988 "// inside second function ",
5989 cx,
5990 );
5991
5992 assert_eq!(
5993 buffer.text(),
5994 "
5995 // above first function
5996 fn a() {
5997 // inside first function
5998 f1();
5999 }
6000 fn b() {
6001 // inside second function f2();
6002 }
6003 fn c() {
6004 f3();
6005 }
6006 "
6007 .unindent()
6008 );
6009 });
6010
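// Interpret the LSP edits against the document version that existed before the
// edits above; the returned ranges should be adjusted for the changes made since then.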
6011 let edits = project
6012 .update(cx, |project, cx| {
6013 project.edits_from_lsp(
6014 &buffer,
6015 vec![
6016 // replace body of first function
6017 lsp::TextEdit {
6018 range: lsp::Range::new(
6019 lsp::Position::new(0, 0),
6020 lsp::Position::new(3, 0),
6021 ),
6022 new_text: "
6023 fn a() {
6024 f10();
6025 }
6026 "
6027 .unindent(),
6028 },
6029 // edit inside second function
6030 lsp::TextEdit {
6031 range: lsp::Range::new(
6032 lsp::Position::new(4, 6),
6033 lsp::Position::new(4, 6),
6034 ),
6035 new_text: "00".into(),
6036 },
6037 // edit inside third function via two distinct edits
6038 lsp::TextEdit {
6039 range: lsp::Range::new(
6040 lsp::Position::new(7, 5),
6041 lsp::Position::new(7, 5),
6042 ),
6043 new_text: "4000".into(),
6044 },
6045 lsp::TextEdit {
6046 range: lsp::Range::new(
6047 lsp::Position::new(7, 5),
6048 lsp::Position::new(7, 6),
6049 ),
6050 new_text: "".into(),
6051 },
6052 ],
6053 Some(lsp_document_version),
6054 cx,
6055 )
6056 })
6057 .await
6058 .unwrap();
6059
6060 buffer.update(cx, |buffer, cx| {
6061 for (range, new_text) in edits {
6062 buffer.edit([range], new_text, cx);
6063 }
6064 assert_eq!(
6065 buffer.text(),
6066 "
6067 // above first function
6068 fn a() {
6069 // inside first function
6070 f10();
6071 }
6072 fn b() {
6073 // inside second function f200();
6074 }
6075 fn c() {
6076 f4000();
6077 }
6078 "
6079 .unindent()
6080 );
6081 });
6082 }
6083
6084 #[gpui::test]
6085 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6086 cx.foreground().forbid_parking();
6087
6088 let text = "
6089 use a::b;
6090 use a::c;
6091
6092 fn f() {
6093 b();
6094 c();
6095 }
6096 "
6097 .unindent();
6098
6099 let fs = FakeFs::new(cx.background());
6100 fs.insert_tree(
6101 "/dir",
6102 json!({
6103 "a.rs": text.clone(),
6104 }),
6105 )
6106 .await;
6107
6108 let project = Project::test(fs, cx);
6109 let worktree_id = project
6110 .update(cx, |project, cx| {
6111 project.find_or_create_local_worktree("/dir", true, cx)
6112 })
6113 .await
6114 .unwrap()
6115 .0
6116 .read_with(cx, |tree, _| tree.id());
6117
6118 let buffer = project
6119 .update(cx, |project, cx| {
6120 project.open_buffer((worktree_id, "a.rs"), cx)
6121 })
6122 .await
6123 .unwrap();
6124
6125 // Simulate the language server sending us a small edit in the form of a very large diff.
6126 // Rust-analyzer does this when performing a merge-imports code action.
6127 let edits = project
6128 .update(cx, |project, cx| {
6129 project.edits_from_lsp(
6130 &buffer,
6131 [
6132 // Replace the first use statement without editing the semicolon.
6133 lsp::TextEdit {
6134 range: lsp::Range::new(
6135 lsp::Position::new(0, 4),
6136 lsp::Position::new(0, 8),
6137 ),
6138 new_text: "a::{b, c}".into(),
6139 },
6140 // Reinsert the remainder of the file between the semicolon and the final
6141 // newline of the file.
6142 lsp::TextEdit {
6143 range: lsp::Range::new(
6144 lsp::Position::new(0, 9),
6145 lsp::Position::new(0, 9),
6146 ),
6147 new_text: "\n\n".into(),
6148 },
6149 lsp::TextEdit {
6150 range: lsp::Range::new(
6151 lsp::Position::new(0, 9),
6152 lsp::Position::new(0, 9),
6153 ),
6154 new_text: "
6155 fn f() {
6156 b();
6157 c();
6158 }"
6159 .unindent(),
6160 },
6161 // Delete everything after the first newline of the file.
6162 lsp::TextEdit {
6163 range: lsp::Range::new(
6164 lsp::Position::new(1, 0),
6165 lsp::Position::new(7, 0),
6166 ),
6167 new_text: "".into(),
6168 },
6169 ],
6170 None,
6171 cx,
6172 )
6173 })
6174 .await
6175 .unwrap();
6176
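// Although the server expressed the change as a large diff that rewrites most of the
// file, the resulting buffer edits should be minimal: only the changed spans are touched.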
6177 buffer.update(cx, |buffer, cx| {
6178 let edits = edits
6179 .into_iter()
6180 .map(|(range, text)| {
6181 (
6182 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6183 text,
6184 )
6185 })
6186 .collect::<Vec<_>>();
6187
6188 assert_eq!(
6189 edits,
6190 [
6191 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6192 (Point::new(1, 0)..Point::new(2, 0), "".into())
6193 ]
6194 );
6195
6196 for (range, new_text) in edits {
6197 buffer.edit([range], new_text, cx);
6198 }
6199 assert_eq!(
6200 buffer.text(),
6201 "
6202 use a::{b, c};
6203
6204 fn f() {
6205 b();
6206 c();
6207 }
6208 "
6209 .unindent()
6210 );
6211 });
6212 }
6213
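// Collects the text of `buffer` within `range` into chunks, merging adjacent chunks
// that share the same diagnostic severity.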
6214 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6215 buffer: &Buffer,
6216 range: Range<T>,
6217 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6218 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6219 for chunk in buffer.snapshot().chunks(range, true) {
6220 if chunks.last().map_or(false, |prev_chunk| {
6221 prev_chunk.1 == chunk.diagnostic_severity
6222 }) {
6223 chunks.last_mut().unwrap().0.push_str(chunk.text);
6224 } else {
6225 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6226 }
6227 }
6228 chunks
6229 }
6230
6231 #[gpui::test]
6232 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6233 let dir = temp_tree(json!({
6234 "root": {
6235 "dir1": {},
6236 "dir2": {
6237 "dir3": {}
6238 }
6239 }
6240 }));
6241
6242 let project = Project::test(Arc::new(RealFs), cx);
6243 let (tree, _) = project
6244 .update(cx, |project, cx| {
6245 project.find_or_create_local_worktree(&dir.path(), true, cx)
6246 })
6247 .await
6248 .unwrap();
6249
6250 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6251 .await;
6252
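// Fuzzy path matching in a worktree that contains only directories should return no results.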
6253 let cancel_flag = Default::default();
6254 let results = project
6255 .read_with(cx, |project, cx| {
6256 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6257 })
6258 .await;
6259
6260 assert!(results.is_empty());
6261 }
6262
6263 #[gpui::test]
6264 async fn test_definition(cx: &mut gpui::TestAppContext) {
6265 let mut language = Language::new(
6266 LanguageConfig {
6267 name: "Rust".into(),
6268 path_suffixes: vec!["rs".to_string()],
6269 ..Default::default()
6270 },
6271 Some(tree_sitter_rust::language()),
6272 );
6273 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6274
6275 let fs = FakeFs::new(cx.background());
6276 fs.insert_tree(
6277 "/dir",
6278 json!({
6279 "a.rs": "const fn a() { A }",
6280 "b.rs": "const y: i32 = crate::a()",
6281 }),
6282 )
6283 .await;
6284
6285 let project = Project::test(fs, cx);
6286 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6287
6288 let (tree, _) = project
6289 .update(cx, |project, cx| {
6290 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6291 })
6292 .await
6293 .unwrap();
6294 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6295 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6296 .await;
6297
6298 let buffer = project
6299 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6300 .await
6301 .unwrap();
6302
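// Configure the fake language server to answer go-to-definition with a location in
// `a.rs`, a file outside the single-file worktree that is currently open.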
6303 let fake_server = fake_servers.next().await.unwrap();
6304 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6305 let params = params.text_document_position_params;
6306 assert_eq!(
6307 params.text_document.uri.to_file_path().unwrap(),
6308 Path::new("/dir/b.rs"),
6309 );
6310 assert_eq!(params.position, lsp::Position::new(0, 22));
6311
6312 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6313 lsp::Location::new(
6314 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6315 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6316 ),
6317 )))
6318 });
6319
6320 let mut definitions = project
6321 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6322 .await
6323 .unwrap();
6324
6325 assert_eq!(definitions.len(), 1);
6326 let definition = definitions.pop().unwrap();
6327 cx.update(|cx| {
6328 let target_buffer = definition.buffer.read(cx);
6329 assert_eq!(
6330 target_buffer
6331 .file()
6332 .unwrap()
6333 .as_local()
6334 .unwrap()
6335 .abs_path(cx),
6336 Path::new("/dir/a.rs"),
6337 );
6338 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6339 assert_eq!(
6340 list_worktrees(&project, cx),
6341 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6342 );
6343
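// Dropping the definition releases the last handle to its target buffer, which
// should cause the invisible worktree for `a.rs` to be removed.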
6344 drop(definition);
6345 });
6346 cx.read(|cx| {
6347 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6348 });
6349
6350 fn list_worktrees<'a>(
6351 project: &'a ModelHandle<Project>,
6352 cx: &'a AppContext,
6353 ) -> Vec<(&'a Path, bool)> {
6354 project
6355 .read(cx)
6356 .worktrees(cx)
6357 .map(|worktree| {
6358 let worktree = worktree.read(cx);
6359 (
6360 worktree.as_local().unwrap().abs_path().as_ref(),
6361 worktree.is_visible(),
6362 )
6363 })
6364 .collect::<Vec<_>>()
6365 }
6366 }
6367
6368 #[gpui::test(iterations = 10)]
6369 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6370 let mut language = Language::new(
6371 LanguageConfig {
6372 name: "TypeScript".into(),
6373 path_suffixes: vec!["ts".to_string()],
6374 ..Default::default()
6375 },
6376 None,
6377 );
6378 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6379
6380 let fs = FakeFs::new(cx.background());
6381 fs.insert_tree(
6382 "/dir",
6383 json!({
6384 "a.ts": "a",
6385 }),
6386 )
6387 .await;
6388
6389 let project = Project::test(fs, cx);
6390 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6391
6392 let (tree, _) = project
6393 .update(cx, |project, cx| {
6394 project.find_or_create_local_worktree("/dir", true, cx)
6395 })
6396 .await
6397 .unwrap();
6398 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6399 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6400 .await;
6401
6402 let buffer = project
6403 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6404 .await
6405 .unwrap();
6406
6407 let fake_server = fake_language_servers.next().await.unwrap();
6408
6409 // The language server returns code actions that contain commands but no edits.
6410 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6411 fake_server
6412 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6413 Ok(Some(vec![
6414 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6415 title: "The code action".into(),
6416 command: Some(lsp::Command {
6417 title: "The command".into(),
6418 command: "_the/command".into(),
6419 arguments: Some(vec![json!("the-argument")]),
6420 }),
6421 ..Default::default()
6422 }),
6423 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6424 title: "two".into(),
6425 ..Default::default()
6426 }),
6427 ]))
6428 })
6429 .next()
6430 .await;
6431
6432 let action = actions.await.unwrap()[0].clone();
6433 let apply = project.update(cx, |project, cx| {
6434 project.apply_code_action(buffer.clone(), action, true, cx)
6435 });
6436
6437 // Resolving the code action does not populate its edits. In the absence of
6438 // edits, we must execute the given command.
6439 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6440 |action, _| async move { Ok(action) },
6441 );
6442
6443 // While executing the command, the language server sends the editor
6444 // a `workspace/applyEdit` request.
6445 fake_server
6446 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6447 let fake = fake_server.clone();
6448 move |params, _| {
6449 assert_eq!(params.command, "_the/command");
6450 let fake = fake.clone();
6451 async move {
6452 fake.server
6453 .request::<lsp::request::ApplyWorkspaceEdit>(
6454 lsp::ApplyWorkspaceEditParams {
6455 label: None,
6456 edit: lsp::WorkspaceEdit {
6457 changes: Some(
6458 [(
6459 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6460 vec![lsp::TextEdit {
6461 range: lsp::Range::new(
6462 lsp::Position::new(0, 0),
6463 lsp::Position::new(0, 0),
6464 ),
6465 new_text: "X".into(),
6466 }],
6467 )]
6468 .into_iter()
6469 .collect(),
6470 ),
6471 ..Default::default()
6472 },
6473 },
6474 )
6475 .await
6476 .unwrap();
6477 Ok(Some(json!(null)))
6478 }
6479 }
6480 })
6481 .next()
6482 .await;
6483
6484 // Applying the code action returns a project transaction containing the edits
6485 // sent by the language server in its `workspace/applyEdit` request.
6486 let transaction = apply.await.unwrap();
6487 assert!(transaction.0.contains_key(&buffer));
6488 buffer.update(cx, |buffer, cx| {
6489 assert_eq!(buffer.text(), "Xa");
6490 buffer.undo(cx);
6491 assert_eq!(buffer.text(), "a");
6492 });
6493 }
6494
6495 #[gpui::test]
6496 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6497 let fs = FakeFs::new(cx.background());
6498 fs.insert_tree(
6499 "/dir",
6500 json!({
6501 "file1": "the old contents",
6502 }),
6503 )
6504 .await;
6505
6506 let project = Project::test(fs.clone(), cx);
6507 let worktree_id = project
6508 .update(cx, |p, cx| {
6509 p.find_or_create_local_worktree("/dir", true, cx)
6510 })
6511 .await
6512 .unwrap()
6513 .0
6514 .read_with(cx, |tree, _| tree.id());
6515
6516 let buffer = project
6517 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6518 .await
6519 .unwrap();
6520 buffer
6521 .update(cx, |buffer, cx| {
6522 assert_eq!(buffer.text(), "the old contents");
6523 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6524 buffer.save(cx)
6525 })
6526 .await
6527 .unwrap();
6528
6529 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6530 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6531 }
6532
6533 #[gpui::test]
6534 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6535 let fs = FakeFs::new(cx.background());
6536 fs.insert_tree(
6537 "/dir",
6538 json!({
6539 "file1": "the old contents",
6540 }),
6541 )
6542 .await;
6543
6544 let project = Project::test(fs.clone(), cx);
6545 let worktree_id = project
6546 .update(cx, |p, cx| {
6547 p.find_or_create_local_worktree("/dir/file1", true, cx)
6548 })
6549 .await
6550 .unwrap()
6551 .0
6552 .read_with(cx, |tree, _| tree.id());
6553
6554 let buffer = project
6555 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6556 .await
6557 .unwrap();
6558 buffer
6559 .update(cx, |buffer, cx| {
6560 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6561 buffer.save(cx)
6562 })
6563 .await
6564 .unwrap();
6565
6566 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6567 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6568 }
6569
6570 #[gpui::test]
6571 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6572 let fs = FakeFs::new(cx.background());
6573 fs.insert_tree("/dir", json!({})).await;
6574
6575 let project = Project::test(fs.clone(), cx);
6576 let (worktree, _) = project
6577 .update(cx, |project, cx| {
6578 project.find_or_create_local_worktree("/dir", true, cx)
6579 })
6580 .await
6581 .unwrap();
6582 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6583
6584 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6585 buffer.update(cx, |buffer, cx| {
6586 buffer.edit([0..0], "abc", cx);
6587 assert!(buffer.is_dirty());
6588 assert!(!buffer.has_conflict());
6589 });
6590 project
6591 .update(cx, |project, cx| {
6592 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6593 })
6594 .await
6595 .unwrap();
6596 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6597 buffer.read_with(cx, |buffer, cx| {
6598 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6599 assert!(!buffer.is_dirty());
6600 assert!(!buffer.has_conflict());
6601 });
6602
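// Re-opening the path that the buffer was saved to should return the same buffer instance.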
6603 let opened_buffer = project
6604 .update(cx, |project, cx| {
6605 project.open_buffer((worktree_id, "file1"), cx)
6606 })
6607 .await
6608 .unwrap();
6609 assert_eq!(opened_buffer, buffer);
6610 }
6611
6612 #[gpui::test(retries = 5)]
6613 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6614 let dir = temp_tree(json!({
6615 "a": {
6616 "file1": "",
6617 "file2": "",
6618 "file3": "",
6619 },
6620 "b": {
6621 "c": {
6622 "file4": "",
6623 "file5": "",
6624 }
6625 }
6626 }));
6627
6628 let project = Project::test(Arc::new(RealFs), cx);
6629 let rpc = project.read_with(cx, |p, _| p.client.clone());
6630
6631 let (tree, _) = project
6632 .update(cx, |p, cx| {
6633 p.find_or_create_local_worktree(dir.path(), true, cx)
6634 })
6635 .await
6636 .unwrap();
6637 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6638
6639 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6640 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6641 async move { buffer.await.unwrap() }
6642 };
6643 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6644 tree.read_with(cx, |tree, _| {
6645 tree.entry_for_path(path)
6646 .expect(&format!("no entry for path {}", path))
6647 .id
6648 })
6649 };
6650
6651 let buffer2 = buffer_for_path("a/file2", cx).await;
6652 let buffer3 = buffer_for_path("a/file3", cx).await;
6653 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6654 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6655
6656 let file2_id = id_for_path("a/file2", &cx);
6657 let file3_id = id_for_path("a/file3", &cx);
6658 let file4_id = id_for_path("b/c/file4", &cx);
6659
6660 // Wait for the initial scan.
6661 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6662 .await;
6663
6664 // Create a remote copy of this worktree.
6665 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6666 let (remote, load_task) = cx.update(|cx| {
6667 Worktree::remote(
6668 1,
6669 1,
6670 initial_snapshot.to_proto(&Default::default(), true),
6671 rpc.clone(),
6672 cx,
6673 )
6674 });
6675 load_task.await;
6676
6677 cx.read(|cx| {
6678 assert!(!buffer2.read(cx).is_dirty());
6679 assert!(!buffer3.read(cx).is_dirty());
6680 assert!(!buffer4.read(cx).is_dirty());
6681 assert!(!buffer5.read(cx).is_dirty());
6682 });
6683
6684 // Rename and delete files and directories.
6685 tree.flush_fs_events(&cx).await;
6686 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6687 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6688 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6689 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6690 tree.flush_fs_events(&cx).await;
6691
6692 let expected_paths = vec![
6693 "a",
6694 "a/file1",
6695 "a/file2.new",
6696 "b",
6697 "d",
6698 "d/file3",
6699 "d/file4",
6700 ];
6701
6702 cx.read(|app| {
6703 assert_eq!(
6704 tree.read(app)
6705 .paths()
6706 .map(|p| p.to_str().unwrap())
6707 .collect::<Vec<_>>(),
6708 expected_paths
6709 );
6710
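// Entry ids survive the renames, and the open buffers now refer to their files'
// new paths; the buffer whose file was removed is marked as deleted.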
6711 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6712 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6713 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6714
6715 assert_eq!(
6716 buffer2.read(app).file().unwrap().path().as_ref(),
6717 Path::new("a/file2.new")
6718 );
6719 assert_eq!(
6720 buffer3.read(app).file().unwrap().path().as_ref(),
6721 Path::new("d/file3")
6722 );
6723 assert_eq!(
6724 buffer4.read(app).file().unwrap().path().as_ref(),
6725 Path::new("d/file4")
6726 );
6727 assert_eq!(
6728 buffer5.read(app).file().unwrap().path().as_ref(),
6729 Path::new("b/c/file5")
6730 );
6731
6732 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6733 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6734 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6735 assert!(buffer5.read(app).file().unwrap().is_deleted());
6736 });
6737
6738 // Update the remote worktree. Check that it becomes consistent with the
6739 // local worktree.
6740 remote.update(cx, |remote, cx| {
6741 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6742 &initial_snapshot,
6743 1,
6744 1,
6745 true,
6746 );
6747 remote
6748 .as_remote_mut()
6749 .unwrap()
6750 .snapshot
6751 .apply_remote_update(update_message)
6752 .unwrap();
6753
6754 assert_eq!(
6755 remote
6756 .paths()
6757 .map(|p| p.to_str().unwrap())
6758 .collect::<Vec<_>>(),
6759 expected_paths
6760 );
6761 });
6762 }
6763
6764 #[gpui::test]
6765 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6766 let fs = FakeFs::new(cx.background());
6767 fs.insert_tree(
6768 "/the-dir",
6769 json!({
6770 "a.txt": "a-contents",
6771 "b.txt": "b-contents",
6772 }),
6773 )
6774 .await;
6775
6776 let project = Project::test(fs.clone(), cx);
6777 let worktree_id = project
6778 .update(cx, |p, cx| {
6779 p.find_or_create_local_worktree("/the-dir", true, cx)
6780 })
6781 .await
6782 .unwrap()
6783 .0
6784 .read_with(cx, |tree, _| tree.id());
6785
6786 // Spawn multiple tasks to open paths, repeating some paths.
6787 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6788 (
6789 p.open_buffer((worktree_id, "a.txt"), cx),
6790 p.open_buffer((worktree_id, "b.txt"), cx),
6791 p.open_buffer((worktree_id, "a.txt"), cx),
6792 )
6793 });
6794
6795 let buffer_a_1 = buffer_a_1.await.unwrap();
6796 let buffer_a_2 = buffer_a_2.await.unwrap();
6797 let buffer_b = buffer_b.await.unwrap();
6798 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6799 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6800
6801 // There is only one buffer per path.
6802 let buffer_a_id = buffer_a_1.id();
6803 assert_eq!(buffer_a_2.id(), buffer_a_id);
6804
6805 // Open the same path again while it is still open.
6806 drop(buffer_a_1);
6807 let buffer_a_3 = project
6808 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6809 .await
6810 .unwrap();
6811
6812 // There's still only one buffer per path.
6813 assert_eq!(buffer_a_3.id(), buffer_a_id);
6814 }
6815
6816 #[gpui::test]
6817 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6818 use std::fs;
6819
6820 let dir = temp_tree(json!({
6821 "file1": "abc",
6822 "file2": "def",
6823 "file3": "ghi",
6824 }));
6825
6826 let project = Project::test(Arc::new(RealFs), cx);
6827 let (worktree, _) = project
6828 .update(cx, |p, cx| {
6829 p.find_or_create_local_worktree(dir.path(), true, cx)
6830 })
6831 .await
6832 .unwrap();
6833 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6834
6835 worktree.flush_fs_events(&cx).await;
6836 worktree
6837 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6838 .await;
6839
6840 let buffer1 = project
6841 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6842 .await
6843 .unwrap();
6844 let events = Rc::new(RefCell::new(Vec::new()));
6845
6846 // initially, the buffer isn't dirty.
6847 buffer1.update(cx, |buffer, cx| {
6848 cx.subscribe(&buffer1, {
6849 let events = events.clone();
6850 move |_, _, event, _| match event {
6851 BufferEvent::Operation(_) => {}
6852 _ => events.borrow_mut().push(event.clone()),
6853 }
6854 })
6855 .detach();
6856
6857 assert!(!buffer.is_dirty());
6858 assert!(events.borrow().is_empty());
6859
6860 buffer.edit(vec![1..2], "", cx);
6861 });
6862
6863 // after the first edit, the buffer is dirty, and emits a dirtied event.
6864 buffer1.update(cx, |buffer, cx| {
6865 assert!(buffer.text() == "ac");
6866 assert!(buffer.is_dirty());
6867 assert_eq!(
6868 *events.borrow(),
6869 &[language::Event::Edited, language::Event::Dirtied]
6870 );
6871 events.borrow_mut().clear();
6872 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6873 });
6874
6875 // after saving, the buffer is not dirty, and emits a saved event.
6876 buffer1.update(cx, |buffer, cx| {
6877 assert!(!buffer.is_dirty());
6878 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6879 events.borrow_mut().clear();
6880
6881 buffer.edit(vec![1..1], "B", cx);
6882 buffer.edit(vec![2..2], "D", cx);
6883 });
6884
6885 // after editing again, the buffer is dirty, and emits another dirty event.
6886 buffer1.update(cx, |buffer, cx| {
6887 assert!(buffer.text() == "aBDc");
6888 assert!(buffer.is_dirty());
6889 assert_eq!(
6890 *events.borrow(),
6891 &[
6892 language::Event::Edited,
6893 language::Event::Dirtied,
6894 language::Event::Edited,
6895 ],
6896 );
6897 events.borrow_mut().clear();
6898
6899 // TODO - currently, after restoring the buffer to its
6900 // previously-saved state, the buffer is still considered dirty.
6901 buffer.edit([1..3], "", cx);
6902 assert!(buffer.text() == "ac");
6903 assert!(buffer.is_dirty());
6904 });
6905
6906 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6907
6908 // When a file is deleted, the buffer is considered dirty.
6909 let events = Rc::new(RefCell::new(Vec::new()));
6910 let buffer2 = project
6911 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6912 .await
6913 .unwrap();
6914 buffer2.update(cx, |_, cx| {
6915 cx.subscribe(&buffer2, {
6916 let events = events.clone();
6917 move |_, _, event, _| events.borrow_mut().push(event.clone())
6918 })
6919 .detach();
6920 });
6921
6922 fs::remove_file(dir.path().join("file2")).unwrap();
6923 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6924 assert_eq!(
6925 *events.borrow(),
6926 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6927 );
6928
6929 // When a buffer is already dirty and its file is deleted, we don't emit a Dirtied event.
6930 let events = Rc::new(RefCell::new(Vec::new()));
6931 let buffer3 = project
6932 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6933 .await
6934 .unwrap();
6935 buffer3.update(cx, |_, cx| {
6936 cx.subscribe(&buffer3, {
6937 let events = events.clone();
6938 move |_, _, event, _| events.borrow_mut().push(event.clone())
6939 })
6940 .detach();
6941 });
6942
6943 worktree.flush_fs_events(&cx).await;
6944 buffer3.update(cx, |buffer, cx| {
6945 buffer.edit(Some(0..0), "x", cx);
6946 });
6947 events.borrow_mut().clear();
6948 fs::remove_file(dir.path().join("file3")).unwrap();
6949 buffer3
6950 .condition(&cx, |_, _| !events.borrow().is_empty())
6951 .await;
6952 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6953 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6954 }
6955
6956 #[gpui::test]
6957 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6958 use std::fs;
6959
6960 let initial_contents = "aaa\nbbbbb\nc\n";
6961 let dir = temp_tree(json!({ "the-file": initial_contents }));
6962
6963 let project = Project::test(Arc::new(RealFs), cx);
6964 let (worktree, _) = project
6965 .update(cx, |p, cx| {
6966 p.find_or_create_local_worktree(dir.path(), true, cx)
6967 })
6968 .await
6969 .unwrap();
6970 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6971
6972 worktree
6973 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6974 .await;
6975
6976 let abs_path = dir.path().join("the-file");
6977 let buffer = project
6978 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6979 .await
6980 .unwrap();
6981
6982 // TODO
6983 // Add a cursor on each row.
6984 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6985 // assert!(!buffer.is_dirty());
6986 // buffer.add_selection_set(
6987 // &(0..3)
6988 // .map(|row| Selection {
6989 // id: row as usize,
6990 // start: Point::new(row, 1),
6991 // end: Point::new(row, 1),
6992 // reversed: false,
6993 // goal: SelectionGoal::None,
6994 // })
6995 // .collect::<Vec<_>>(),
6996 // cx,
6997 // )
6998 // });
6999
7000 // Change the file on disk, adding two new lines of text, and removing
7001 // one line.
7002 buffer.read_with(cx, |buffer, _| {
7003 assert!(!buffer.is_dirty());
7004 assert!(!buffer.has_conflict());
7005 });
7006 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7007 fs::write(&abs_path, new_contents).unwrap();
7008
7009 // Because the buffer was not modified, it is reloaded from disk. Its
7010 // contents are edited according to the diff between the old and new
7011 // file contents.
7012 buffer
7013 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7014 .await;
7015
7016 buffer.update(cx, |buffer, _| {
7017 assert_eq!(buffer.text(), new_contents);
7018 assert!(!buffer.is_dirty());
7019 assert!(!buffer.has_conflict());
7020
7021 // TODO
7022 // let cursor_positions = buffer
7023 // .selection_set(selection_set_id)
7024 // .unwrap()
7025 // .selections::<Point>(&*buffer)
7026 // .map(|selection| {
7027 // assert_eq!(selection.start, selection.end);
7028 // selection.start
7029 // })
7030 // .collect::<Vec<_>>();
7031 // assert_eq!(
7032 // cursor_positions,
7033 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7034 // );
7035 });
7036
7037 // Modify the buffer
7038 buffer.update(cx, |buffer, cx| {
7039 buffer.edit(vec![0..0], " ", cx);
7040 assert!(buffer.is_dirty());
7041 assert!(!buffer.has_conflict());
7042 });
7043
7044 // Change the file on disk again, adding blank lines to the beginning.
7045 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7046
7047 // Because the buffer is modified, it doesn't reload from disk, but is
7048 // marked as having a conflict.
7049 buffer
7050 .condition(&cx, |buffer, _| buffer.has_conflict())
7051 .await;
7052 }
7053
7054 #[gpui::test]
7055 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7056 cx.foreground().forbid_parking();
7057
7058 let fs = FakeFs::new(cx.background());
7059 fs.insert_tree(
7060 "/the-dir",
7061 json!({
7062 "a.rs": "
7063 fn foo(mut v: Vec<usize>) {
7064 for x in &v {
7065 v.push(1);
7066 }
7067 }
7068 "
7069 .unindent(),
7070 }),
7071 )
7072 .await;
7073
7074 let project = Project::test(fs.clone(), cx);
7075 let (worktree, _) = project
7076 .update(cx, |p, cx| {
7077 p.find_or_create_local_worktree("/the-dir", true, cx)
7078 })
7079 .await
7080 .unwrap();
7081 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7082
7083 let buffer = project
7084 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7085 .await
7086 .unwrap();
7087
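// Construct diagnostics in which hints and their primary diagnostics reference
// one another via `related_information`.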
7088 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7089 let message = lsp::PublishDiagnosticsParams {
7090 uri: buffer_uri.clone(),
7091 diagnostics: vec![
7092 lsp::Diagnostic {
7093 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7094 severity: Some(DiagnosticSeverity::WARNING),
7095 message: "error 1".to_string(),
7096 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7097 location: lsp::Location {
7098 uri: buffer_uri.clone(),
7099 range: lsp::Range::new(
7100 lsp::Position::new(1, 8),
7101 lsp::Position::new(1, 9),
7102 ),
7103 },
7104 message: "error 1 hint 1".to_string(),
7105 }]),
7106 ..Default::default()
7107 },
7108 lsp::Diagnostic {
7109 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7110 severity: Some(DiagnosticSeverity::HINT),
7111 message: "error 1 hint 1".to_string(),
7112 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7113 location: lsp::Location {
7114 uri: buffer_uri.clone(),
7115 range: lsp::Range::new(
7116 lsp::Position::new(1, 8),
7117 lsp::Position::new(1, 9),
7118 ),
7119 },
7120 message: "original diagnostic".to_string(),
7121 }]),
7122 ..Default::default()
7123 },
7124 lsp::Diagnostic {
7125 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7126 severity: Some(DiagnosticSeverity::ERROR),
7127 message: "error 2".to_string(),
7128 related_information: Some(vec![
7129 lsp::DiagnosticRelatedInformation {
7130 location: lsp::Location {
7131 uri: buffer_uri.clone(),
7132 range: lsp::Range::new(
7133 lsp::Position::new(1, 13),
7134 lsp::Position::new(1, 15),
7135 ),
7136 },
7137 message: "error 2 hint 1".to_string(),
7138 },
7139 lsp::DiagnosticRelatedInformation {
7140 location: lsp::Location {
7141 uri: buffer_uri.clone(),
7142 range: lsp::Range::new(
7143 lsp::Position::new(1, 13),
7144 lsp::Position::new(1, 15),
7145 ),
7146 },
7147 message: "error 2 hint 2".to_string(),
7148 },
7149 ]),
7150 ..Default::default()
7151 },
7152 lsp::Diagnostic {
7153 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7154 severity: Some(DiagnosticSeverity::HINT),
7155 message: "error 2 hint 1".to_string(),
7156 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7157 location: lsp::Location {
7158 uri: buffer_uri.clone(),
7159 range: lsp::Range::new(
7160 lsp::Position::new(2, 8),
7161 lsp::Position::new(2, 17),
7162 ),
7163 },
7164 message: "original diagnostic".to_string(),
7165 }]),
7166 ..Default::default()
7167 },
7168 lsp::Diagnostic {
7169 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7170 severity: Some(DiagnosticSeverity::HINT),
7171 message: "error 2 hint 2".to_string(),
7172 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7173 location: lsp::Location {
7174 uri: buffer_uri.clone(),
7175 range: lsp::Range::new(
7176 lsp::Position::new(2, 8),
7177 lsp::Position::new(2, 17),
7178 ),
7179 },
7180 message: "original diagnostic".to_string(),
7181 }]),
7182 ..Default::default()
7183 },
7184 ],
7185 version: None,
7186 };
7187
7188 project
7189 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7190 .unwrap();
7191 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7192
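// Related diagnostics share a group id: "error 1" and its hint form group 0,
// while "error 2" and its two hints form group 1.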
7193 assert_eq!(
7194 buffer
7195 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7196 .collect::<Vec<_>>(),
7197 &[
7198 DiagnosticEntry {
7199 range: Point::new(1, 8)..Point::new(1, 9),
7200 diagnostic: Diagnostic {
7201 severity: DiagnosticSeverity::WARNING,
7202 message: "error 1".to_string(),
7203 group_id: 0,
7204 is_primary: true,
7205 ..Default::default()
7206 }
7207 },
7208 DiagnosticEntry {
7209 range: Point::new(1, 8)..Point::new(1, 9),
7210 diagnostic: Diagnostic {
7211 severity: DiagnosticSeverity::HINT,
7212 message: "error 1 hint 1".to_string(),
7213 group_id: 0,
7214 is_primary: false,
7215 ..Default::default()
7216 }
7217 },
7218 DiagnosticEntry {
7219 range: Point::new(1, 13)..Point::new(1, 15),
7220 diagnostic: Diagnostic {
7221 severity: DiagnosticSeverity::HINT,
7222 message: "error 2 hint 1".to_string(),
7223 group_id: 1,
7224 is_primary: false,
7225 ..Default::default()
7226 }
7227 },
7228 DiagnosticEntry {
7229 range: Point::new(1, 13)..Point::new(1, 15),
7230 diagnostic: Diagnostic {
7231 severity: DiagnosticSeverity::HINT,
7232 message: "error 2 hint 2".to_string(),
7233 group_id: 1,
7234 is_primary: false,
7235 ..Default::default()
7236 }
7237 },
7238 DiagnosticEntry {
7239 range: Point::new(2, 8)..Point::new(2, 17),
7240 diagnostic: Diagnostic {
7241 severity: DiagnosticSeverity::ERROR,
7242 message: "error 2".to_string(),
7243 group_id: 1,
7244 is_primary: true,
7245 ..Default::default()
7246 }
7247 }
7248 ]
7249 );
7250
7251 assert_eq!(
7252 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7253 &[
7254 DiagnosticEntry {
7255 range: Point::new(1, 8)..Point::new(1, 9),
7256 diagnostic: Diagnostic {
7257 severity: DiagnosticSeverity::WARNING,
7258 message: "error 1".to_string(),
7259 group_id: 0,
7260 is_primary: true,
7261 ..Default::default()
7262 }
7263 },
7264 DiagnosticEntry {
7265 range: Point::new(1, 8)..Point::new(1, 9),
7266 diagnostic: Diagnostic {
7267 severity: DiagnosticSeverity::HINT,
7268 message: "error 1 hint 1".to_string(),
7269 group_id: 0,
7270 is_primary: false,
7271 ..Default::default()
7272 }
7273 },
7274 ]
7275 );
7276 assert_eq!(
7277 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7278 &[
7279 DiagnosticEntry {
7280 range: Point::new(1, 13)..Point::new(1, 15),
7281 diagnostic: Diagnostic {
7282 severity: DiagnosticSeverity::HINT,
7283 message: "error 2 hint 1".to_string(),
7284 group_id: 1,
7285 is_primary: false,
7286 ..Default::default()
7287 }
7288 },
7289 DiagnosticEntry {
7290 range: Point::new(1, 13)..Point::new(1, 15),
7291 diagnostic: Diagnostic {
7292 severity: DiagnosticSeverity::HINT,
7293 message: "error 2 hint 2".to_string(),
7294 group_id: 1,
7295 is_primary: false,
7296 ..Default::default()
7297 }
7298 },
7299 DiagnosticEntry {
7300 range: Point::new(2, 8)..Point::new(2, 17),
7301 diagnostic: Diagnostic {
7302 severity: DiagnosticSeverity::ERROR,
7303 message: "error 2".to_string(),
7304 group_id: 1,
7305 is_primary: true,
7306 ..Default::default()
7307 }
7308 }
7309 ]
7310 );
7311 }
7312
7313 #[gpui::test]
7314 async fn test_rename(cx: &mut gpui::TestAppContext) {
7315 cx.foreground().forbid_parking();
7316
7317 let mut language = Language::new(
7318 LanguageConfig {
7319 name: "Rust".into(),
7320 path_suffixes: vec!["rs".to_string()],
7321 ..Default::default()
7322 },
7323 Some(tree_sitter_rust::language()),
7324 );
7325 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7326
7327 let fs = FakeFs::new(cx.background());
7328 fs.insert_tree(
7329 "/dir",
7330 json!({
7331 "one.rs": "const ONE: usize = 1;",
7332 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7333 }),
7334 )
7335 .await;
7336
7337 let project = Project::test(fs.clone(), cx);
7338 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7339
7340 let (tree, _) = project
7341 .update(cx, |project, cx| {
7342 project.find_or_create_local_worktree("/dir", true, cx)
7343 })
7344 .await
7345 .unwrap();
7346 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7347 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7348 .await;
7349
7350 let buffer = project
7351 .update(cx, |project, cx| {
7352 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7353 })
7354 .await
7355 .unwrap();
7356
7357 let fake_server = fake_servers.next().await.unwrap();
7358
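// Prepare a rename at the position of `ONE`. The fake server replies with the
// symbol's range, which should be converted back into buffer offsets.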
7359 let response = project.update(cx, |project, cx| {
7360 project.prepare_rename(buffer.clone(), 7, cx)
7361 });
7362 fake_server
7363 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7364 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7365 assert_eq!(params.position, lsp::Position::new(0, 7));
7366 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7367 lsp::Position::new(0, 6),
7368 lsp::Position::new(0, 9),
7369 ))))
7370 })
7371 .next()
7372 .await
7373 .unwrap();
7374 let range = response.await.unwrap().unwrap();
7375 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7376 assert_eq!(range, 6..9);
7377
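// Perform the rename. The fake server's workspace edit touches both files, so the
// resulting project transaction should contain an entry for each buffer.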
7378 let response = project.update(cx, |project, cx| {
7379 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7380 });
7381 fake_server
7382 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7383 assert_eq!(
7384 params.text_document_position.text_document.uri.as_str(),
7385 "file:///dir/one.rs"
7386 );
7387 assert_eq!(
7388 params.text_document_position.position,
7389 lsp::Position::new(0, 7)
7390 );
7391 assert_eq!(params.new_name, "THREE");
7392 Ok(Some(lsp::WorkspaceEdit {
7393 changes: Some(
7394 [
7395 (
7396 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7397 vec![lsp::TextEdit::new(
7398 lsp::Range::new(
7399 lsp::Position::new(0, 6),
7400 lsp::Position::new(0, 9),
7401 ),
7402 "THREE".to_string(),
7403 )],
7404 ),
7405 (
7406 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7407 vec![
7408 lsp::TextEdit::new(
7409 lsp::Range::new(
7410 lsp::Position::new(0, 24),
7411 lsp::Position::new(0, 27),
7412 ),
7413 "THREE".to_string(),
7414 ),
7415 lsp::TextEdit::new(
7416 lsp::Range::new(
7417 lsp::Position::new(0, 35),
7418 lsp::Position::new(0, 38),
7419 ),
7420 "THREE".to_string(),
7421 ),
7422 ],
7423 ),
7424 ]
7425 .into_iter()
7426 .collect(),
7427 ),
7428 ..Default::default()
7429 }))
7430 })
7431 .next()
7432 .await
7433 .unwrap();
7434 let mut transaction = response.await.unwrap().0;
7435 assert_eq!(transaction.len(), 2);
7436 assert_eq!(
7437 transaction
7438 .remove_entry(&buffer)
7439 .unwrap()
7440 .0
7441 .read_with(cx, |buffer, _| buffer.text()),
7442 "const THREE: usize = 1;"
7443 );
7444 assert_eq!(
7445 transaction
7446 .into_keys()
7447 .next()
7448 .unwrap()
7449 .read_with(cx, |buffer, _| buffer.text()),
7450 "const TWO: usize = one::THREE + one::THREE;"
7451 );
7452 }
7453
7454 #[gpui::test]
7455 async fn test_search(cx: &mut gpui::TestAppContext) {
7456 let fs = FakeFs::new(cx.background());
7457 fs.insert_tree(
7458 "/dir",
7459 json!({
7460 "one.rs": "const ONE: usize = 1;",
7461 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7462 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7463 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7464 }),
7465 )
7466 .await;
7467 let project = Project::test(fs.clone(), cx);
7468 let (tree, _) = project
7469 .update(cx, |project, cx| {
7470 project.find_or_create_local_worktree("/dir", true, cx)
7471 })
7472 .await
7473 .unwrap();
7474 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7475 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7476 .await;
7477
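// Initially, the search reflects the file contents as written above.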
7478 assert_eq!(
7479 search(&project, SearchQuery::text("TWO", false, true), cx)
7480 .await
7481 .unwrap(),
7482 HashMap::from_iter([
7483 ("two.rs".to_string(), vec![6..9]),
7484 ("three.rs".to_string(), vec![37..40])
7485 ])
7486 );
7487
7488 let buffer_4 = project
7489 .update(cx, |project, cx| {
7490 project.open_buffer((worktree_id, "four.rs"), cx)
7491 })
7492 .await
7493 .unwrap();
7494 buffer_4.update(cx, |buffer, cx| {
7495 buffer.edit([20..28, 31..43], "two::TWO", cx);
7496 });
7497
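// The search now also reflects the unsaved, in-memory edits made to `four.rs`.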
7498 assert_eq!(
7499 search(&project, SearchQuery::text("TWO", false, true), cx)
7500 .await
7501 .unwrap(),
7502 HashMap::from_iter([
7503 ("two.rs".to_string(), vec![6..9]),
7504 ("three.rs".to_string(), vec![37..40]),
7505 ("four.rs".to_string(), vec![25..28, 36..39])
7506 ])
7507 );
7508
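// Runs a project-wide search and returns the matched ranges as offsets, keyed by
// each buffer's path.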
7509 async fn search(
7510 project: &ModelHandle<Project>,
7511 query: SearchQuery,
7512 cx: &mut gpui::TestAppContext,
7513 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7514 let results = project
7515 .update(cx, |project, cx| project.search(query, cx))
7516 .await?;
7517
7518 Ok(results
7519 .into_iter()
7520 .map(|(buffer, ranges)| {
7521 buffer.read_with(cx, |buffer, _| {
7522 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7523 let ranges = ranges
7524 .into_iter()
7525 .map(|range| range.to_offset(buffer))
7526 .collect::<Vec<_>>();
7527 (path, ranges)
7528 })
7529 })
7530 .collect())
7531 }
7532 }
7533}