pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;
52
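/// Implemented by models (such as buffers) that the project can associate with a
/// worktree entry via `entry_id`.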
53pub trait Item: Entity {
54 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
55}
56
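/// The central model of a workspace: it owns the worktrees, open buffers, running
/// language servers, and collaboration state. A project is either local (backed by
/// the filesystem on this machine) or remote (a guest replica joined over RPC).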
57pub struct Project {
58 worktrees: Vec<WorktreeHandle>,
59 active_entry: Option<ProjectEntryId>,
60 languages: Arc<LanguageRegistry>,
61 language_servers:
62 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
63 started_language_servers:
64 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
65 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
66 language_server_settings: Arc<Mutex<serde_json::Value>>,
67 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
68 next_language_server_id: usize,
69 client: Arc<client::Client>,
70 next_entry_id: Arc<AtomicUsize>,
71 user_store: ModelHandle<UserStore>,
72 fs: Arc<dyn Fs>,
73 client_state: ProjectClientState,
74 collaborators: HashMap<PeerId, Collaborator>,
75 subscriptions: Vec<client::Subscription>,
76 language_servers_with_diagnostics_running: isize,
77 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
78 shared_buffers: HashMap<PeerId, HashSet<u64>>,
79 loading_buffers: HashMap<
80 ProjectPath,
81 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
82 >,
83 loading_local_worktrees:
84 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
85 opened_buffers: HashMap<u64, OpenBuffer>,
86 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
87 nonce: u128,
88}
89
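/// How the project holds an open buffer:
/// - `Strong` keeps the buffer alive (used while the project is shared or remote),
/// - `Weak` lets the buffer drop once nothing else references it,
/// - `Loading` accumulates remote operations that arrive before the buffer finishes loading.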
90enum OpenBuffer {
91 Strong(ModelHandle<Buffer>),
92 Weak(WeakModelHandle<Buffer>),
93 Loading(Vec<Operation>),
94}
95
96enum WorktreeHandle {
97 Strong(ModelHandle<Worktree>),
98 Weak(WeakModelHandle<Worktree>),
99}
100
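/// Whether this project is the local host (which may or may not currently be shared)
/// or a remote guest replica of another user's project.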
101enum ProjectClientState {
102 Local {
103 is_shared: bool,
104 remote_id_tx: watch::Sender<Option<u64>>,
105 remote_id_rx: watch::Receiver<Option<u64>>,
106 _maintain_remote_id_task: Task<Option<()>>,
107 },
108 Remote {
109 sharing_has_stopped: bool,
110 remote_id: u64,
111 replica_id: ReplicaId,
112 _detect_unshare_task: Task<Option<()>>,
113 },
114}
115
116#[derive(Clone, Debug)]
117pub struct Collaborator {
118 pub user: Arc<User>,
119 pub peer_id: PeerId,
120 pub replica_id: ReplicaId,
121}
122
123#[derive(Clone, Debug, PartialEq)]
124pub enum Event {
125 ActiveEntryChanged(Option<ProjectEntryId>),
126 WorktreeRemoved(WorktreeId),
127 DiskBasedDiagnosticsStarted,
128 DiskBasedDiagnosticsUpdated,
129 DiskBasedDiagnosticsFinished,
130 DiagnosticsUpdated(ProjectPath),
131 RemoteIdChanged(Option<u64>),
132 CollaboratorLeft(PeerId),
133}
134
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_server_name: LanguageServerName,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
225#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
226pub struct ProjectEntryId(usize);
227
228impl ProjectEntryId {
229 pub fn new(counter: &AtomicUsize) -> Self {
230 Self(counter.fetch_add(1, SeqCst))
231 }
232
233 pub fn from_proto(id: u64) -> Self {
234 Self(id as usize)
235 }
236
237 pub fn to_proto(&self) -> u64 {
238 self.0 as u64
239 }
240
241 pub fn to_usize(&self) -> usize {
242 self.0
243 }
244}
245
246impl Project {
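    /// Registers the project's RPC message and request handlers with the client.
    /// Intended to be called once, before any projects are created.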
247 pub fn init(client: &Arc<Client>) {
248 client.add_model_message_handler(Self::handle_add_collaborator);
249 client.add_model_message_handler(Self::handle_buffer_reloaded);
250 client.add_model_message_handler(Self::handle_buffer_saved);
251 client.add_model_message_handler(Self::handle_start_language_server);
252 client.add_model_message_handler(Self::handle_update_language_server);
253 client.add_model_message_handler(Self::handle_remove_collaborator);
254 client.add_model_message_handler(Self::handle_register_worktree);
255 client.add_model_message_handler(Self::handle_unregister_worktree);
256 client.add_model_message_handler(Self::handle_unshare_project);
257 client.add_model_message_handler(Self::handle_update_buffer_file);
258 client.add_model_message_handler(Self::handle_update_buffer);
259 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
260 client.add_model_message_handler(Self::handle_update_worktree);
261 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
262 client.add_model_request_handler(Self::handle_apply_code_action);
263 client.add_model_request_handler(Self::handle_reload_buffers);
264 client.add_model_request_handler(Self::handle_format_buffers);
265 client.add_model_request_handler(Self::handle_get_code_actions);
266 client.add_model_request_handler(Self::handle_get_completions);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
272 client.add_model_request_handler(Self::handle_search_project);
273 client.add_model_request_handler(Self::handle_get_project_symbols);
274 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
275 client.add_model_request_handler(Self::handle_open_buffer_by_id);
276 client.add_model_request_handler(Self::handle_open_buffer_by_path);
277 client.add_model_request_handler(Self::handle_save_buffer);
278 }
279
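    /// Creates a new local project. A background task watches the client's connection
    /// status and (re)registers the project with the server whenever the client
    /// connects, unregistering it again on disconnect.
    ///
    /// A minimal usage sketch (assuming `cx` is a `&mut MutableAppContext` and the
    /// other handles are already constructed):
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```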
280 pub fn local(
281 client: Arc<Client>,
282 user_store: ModelHandle<UserStore>,
283 languages: Arc<LanguageRegistry>,
284 fs: Arc<dyn Fs>,
285 cx: &mut MutableAppContext,
286 ) -> ModelHandle<Self> {
287 cx.add_model(|cx: &mut ModelContext<Self>| {
288 let (remote_id_tx, remote_id_rx) = watch::channel();
289 let _maintain_remote_id_task = cx.spawn_weak({
290 let rpc = client.clone();
291 move |this, mut cx| {
292 async move {
293 let mut status = rpc.status();
294 while let Some(status) = status.next().await {
295 if let Some(this) = this.upgrade(&cx) {
296 if status.is_connected() {
297 this.update(&mut cx, |this, cx| this.register(cx)).await?;
298 } else {
299 this.update(&mut cx, |this, cx| this.unregister(cx));
300 }
301 }
302 }
303 Ok(())
304 }
305 .log_err()
306 }
307 });
308
309 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
310 Self {
311 worktrees: Default::default(),
312 collaborators: Default::default(),
313 opened_buffers: Default::default(),
314 shared_buffers: Default::default(),
315 loading_buffers: Default::default(),
316 loading_local_worktrees: Default::default(),
317 buffer_snapshots: Default::default(),
318 client_state: ProjectClientState::Local {
319 is_shared: false,
320 remote_id_tx,
321 remote_id_rx,
322 _maintain_remote_id_task,
323 },
324 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
325 subscriptions: Vec::new(),
326 active_entry: None,
327 languages,
328 client,
329 user_store,
330 fs,
331 next_entry_id: Default::default(),
332 language_servers_with_diagnostics_running: 0,
333 language_servers: Default::default(),
334 started_language_servers: Default::default(),
335 language_server_statuses: Default::default(),
336 last_workspace_edits_by_language_server: Default::default(),
337 language_server_settings: Default::default(),
338 next_language_server_id: 0,
339 nonce: StdRng::from_entropy().gen(),
340 }
341 })
342 }
343
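    /// Joins the project identified by `remote_id` as a guest: authenticates and
    /// connects the client, requests `JoinProject`, builds remote worktrees from the
    /// response, and loads the existing collaborators.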
344 pub async fn remote(
345 remote_id: u64,
346 client: Arc<Client>,
347 user_store: ModelHandle<UserStore>,
348 languages: Arc<LanguageRegistry>,
349 fs: Arc<dyn Fs>,
350 cx: &mut AsyncAppContext,
351 ) -> Result<ModelHandle<Self>> {
352 client.authenticate_and_connect(true, &cx).await?;
353
354 let response = client
355 .request(proto::JoinProject {
356 project_id: remote_id,
357 })
358 .await?;
359
360 let replica_id = response.replica_id as ReplicaId;
361
362 let mut worktrees = Vec::new();
363 for worktree in response.worktrees {
364 let (worktree, load_task) = cx
365 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
366 worktrees.push(worktree);
367 load_task.detach();
368 }
369
370 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
371 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
372 let mut this = Self {
373 worktrees: Vec::new(),
374 loading_buffers: Default::default(),
375 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
376 shared_buffers: Default::default(),
377 loading_local_worktrees: Default::default(),
378 active_entry: None,
379 collaborators: Default::default(),
380 languages,
381 user_store: user_store.clone(),
382 fs,
383 next_entry_id: Default::default(),
384 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
385 client: client.clone(),
386 client_state: ProjectClientState::Remote {
387 sharing_has_stopped: false,
388 remote_id,
389 replica_id,
390 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
391 async move {
392 let mut status = client.status();
393 let is_connected =
394 status.next().await.map_or(false, |s| s.is_connected());
395 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
396 if !is_connected || status.next().await.is_some() {
397 if let Some(this) = this.upgrade(&cx) {
398 this.update(&mut cx, |this, cx| this.project_unshared(cx))
399 }
400 }
401 Ok(())
402 }
403 .log_err()
404 }),
405 },
406 language_servers_with_diagnostics_running: 0,
407 language_servers: Default::default(),
408 started_language_servers: Default::default(),
409 language_server_settings: Default::default(),
410 language_server_statuses: response
411 .language_servers
412 .into_iter()
413 .map(|server| {
414 (
415 server.id as usize,
416 LanguageServerStatus {
417 name: server.name,
418 pending_work: Default::default(),
419 pending_diagnostic_updates: 0,
420 },
421 )
422 })
423 .collect(),
424 last_workspace_edits_by_language_server: Default::default(),
425 next_language_server_id: 0,
426 opened_buffers: Default::default(),
427 buffer_snapshots: Default::default(),
428 nonce: StdRng::from_entropy().gen(),
429 };
430 for worktree in worktrees {
431 this.add_worktree(&worktree, cx);
432 }
433 this
434 });
435
436 let user_ids = response
437 .collaborators
438 .iter()
439 .map(|peer| peer.user_id)
440 .collect();
441 user_store
442 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
443 .await?;
444 let mut collaborators = HashMap::default();
445 for message in response.collaborators {
446 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
447 collaborators.insert(collaborator.peer_id, collaborator);
448 }
449
450 this.update(cx, |this, _| {
451 this.collaborators = collaborators;
452 });
453
454 Ok(this)
455 }
456
457 #[cfg(any(test, feature = "test-support"))]
458 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
459 let languages = Arc::new(LanguageRegistry::test());
460 let http_client = client::test::FakeHttpClient::with_404_response();
461 let client = client::Client::new(http_client.clone());
462 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
463 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
464 }
465
466 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
467 self.opened_buffers
468 .get(&remote_id)
469 .and_then(|buffer| buffer.upgrade(cx))
470 }
471
472 #[cfg(any(test, feature = "test-support"))]
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
535 self.unshare(cx);
536 for worktree in &self.worktrees {
537 if let Some(worktree) = worktree.upgrade(cx) {
538 worktree.update(cx, |worktree, _| {
539 worktree.as_local_mut().unwrap().unregister();
540 });
541 }
542 }
543
544 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
545 *remote_id_tx.borrow_mut() = None;
546 }
547
548 self.subscriptions.clear();
549 }
550
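    /// Registers this local project with the server, storing the newly assigned
    /// remote id and registering each local worktree under it. Any previous
    /// registration is torn down first via `unregister`.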
551 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
552 self.unregister(cx);
553
554 let response = self.client.request(proto::RegisterProject {});
555 cx.spawn(|this, mut cx| async move {
556 let remote_id = response.await?.project_id;
557
558 let mut registrations = Vec::new();
559 this.update(&mut cx, |this, cx| {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
561 *remote_id_tx.borrow_mut() = Some(remote_id);
562 }
563
564 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
565
566 this.subscriptions
567 .push(this.client.add_model_for_remote_entity(remote_id, cx));
568
569 for worktree in &this.worktrees {
570 if let Some(worktree) = worktree.upgrade(cx) {
571 registrations.push(worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.register(remote_id, cx)
574 }));
575 }
576 }
577 });
578
579 futures::future::try_join_all(registrations).await?;
580 Ok(())
581 })
582 }
583
584 pub fn remote_id(&self) -> Option<u64> {
585 match &self.client_state {
586 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
587 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
588 }
589 }
590
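    /// Resolves to the project's remote id, waiting for one to be assigned if this
    /// local project has not been registered with the server yet.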
591 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
592 let mut id = None;
593 let mut watch = None;
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
596 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
597 }
598
599 async move {
600 if let Some(id) = id {
601 return id;
602 }
603 let mut watch = watch.unwrap();
604 loop {
605 let id = *watch.borrow();
606 if let Some(id) = id {
607 return id;
608 }
609 watch.next().await;
610 }
611 }
612 }
613
614 pub fn replica_id(&self) -> ReplicaId {
615 match &self.client_state {
616 ProjectClientState::Local { .. } => 0,
617 ProjectClientState::Remote { replica_id, .. } => *replica_id,
618 }
619 }
620
621 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
622 &self.collaborators
623 }
624
625 pub fn worktrees<'a>(
626 &'a self,
627 cx: &'a AppContext,
628 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
629 self.worktrees
630 .iter()
631 .filter_map(move |worktree| worktree.upgrade(cx))
632 }
633
634 pub fn visible_worktrees<'a>(
635 &'a self,
636 cx: &'a AppContext,
637 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
638 self.worktrees.iter().filter_map(|worktree| {
639 worktree.upgrade(cx).and_then(|worktree| {
640 if worktree.read(cx).is_visible() {
641 Some(worktree)
642 } else {
643 None
644 }
645 })
646 })
647 }
648
649 pub fn worktree_for_id(
650 &self,
651 id: WorktreeId,
652 cx: &AppContext,
653 ) -> Option<ModelHandle<Worktree>> {
654 self.worktrees(cx)
655 .find(|worktree| worktree.read(cx).id() == id)
656 }
657
658 pub fn worktree_for_entry(
659 &self,
660 entry_id: ProjectEntryId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
665 }
666
667 pub fn worktree_id_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<WorktreeId> {
672 self.worktree_for_entry(entry_id, cx)
673 .map(|worktree| worktree.read(cx).id())
674 }
675
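    /// Shares this local project with collaborators: upgrades weak buffer and
    /// worktree handles so they stay alive for guests, asks the server to share the
    /// project, and then shares each worktree.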
676 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
677 let rpc = self.client.clone();
678 cx.spawn(|this, mut cx| async move {
679 let project_id = this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local {
681 is_shared,
682 remote_id_rx,
683 ..
684 } = &mut this.client_state
685 {
686 *is_shared = true;
687
688 for open_buffer in this.opened_buffers.values_mut() {
689 match open_buffer {
690 OpenBuffer::Strong(_) => {}
691 OpenBuffer::Weak(buffer) => {
692 if let Some(buffer) = buffer.upgrade(cx) {
693 *open_buffer = OpenBuffer::Strong(buffer);
694 }
695 }
696 OpenBuffer::Loading(_) => unreachable!(),
697 }
698 }
699
700 for worktree_handle in this.worktrees.iter_mut() {
701 match worktree_handle {
702 WorktreeHandle::Strong(_) => {}
703 WorktreeHandle::Weak(worktree) => {
704 if let Some(worktree) = worktree.upgrade(cx) {
705 *worktree_handle = WorktreeHandle::Strong(worktree);
706 }
707 }
708 }
709 }
710
711 remote_id_rx
712 .borrow()
713 .ok_or_else(|| anyhow!("no project id"))
714 } else {
715 Err(anyhow!("can't share a remote project"))
716 }
717 })?;
718
719 rpc.request(proto::ShareProject { project_id }).await?;
720
721 let mut tasks = Vec::new();
722 this.update(&mut cx, |this, cx| {
723 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
724 worktree.update(cx, |worktree, cx| {
725 let worktree = worktree.as_local_mut().unwrap();
726 tasks.push(worktree.share(project_id, cx));
727 });
728 }
729 });
730 for task in tasks {
731 task.await?;
732 }
733 this.update(&mut cx, |_, cx| cx.notify());
734 Ok(())
735 })
736 }
737
738 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
739 let rpc = self.client.clone();
740
741 if let ProjectClientState::Local {
742 is_shared,
743 remote_id_rx,
744 ..
745 } = &mut self.client_state
746 {
747 if !*is_shared {
748 return;
749 }
750
751 *is_shared = false;
752 self.collaborators.clear();
753 self.shared_buffers.clear();
754 for worktree_handle in self.worktrees.iter_mut() {
755 if let WorktreeHandle::Strong(worktree) = worktree_handle {
756 let is_visible = worktree.update(cx, |worktree, _| {
757 worktree.as_local_mut().unwrap().unshare();
758 worktree.is_visible()
759 });
760 if !is_visible {
761 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
762 }
763 }
764 }
765
            for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }
774
775 if let Some(project_id) = *remote_id_rx.borrow() {
776 rpc.send(proto::UnshareProject { project_id }).log_err();
777 }
778
779 cx.notify();
780 } else {
781 log::error!("attempted to unshare a remote project");
782 }
783 }
784
785 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
786 if let ProjectClientState::Remote {
787 sharing_has_stopped,
788 ..
789 } = &mut self.client_state
790 {
791 *sharing_has_stopped = true;
792 self.collaborators.clear();
793 cx.notify();
794 }
795 }
796
797 pub fn is_read_only(&self) -> bool {
798 match &self.client_state {
799 ProjectClientState::Local { .. } => false,
800 ProjectClientState::Remote {
801 sharing_has_stopped,
802 ..
803 } => *sharing_has_stopped,
804 }
805 }
806
807 pub fn is_local(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => true,
810 ProjectClientState::Remote { .. } => false,
811 }
812 }
813
814 pub fn is_remote(&self) -> bool {
815 !self.is_local()
816 }
817
818 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
819 if self.is_remote() {
820 return Err(anyhow!("creating buffers as a guest is not supported yet"));
821 }
822
823 let buffer = cx.add_model(|cx| {
824 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
825 });
826 self.register_buffer(&buffer, cx)?;
827 Ok(buffer)
828 }
829
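    /// Opens the buffer at the given project path and returns its project entry id
    /// together with a type-erased handle to the buffer model.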
830 pub fn open_path(
831 &mut self,
832 path: impl Into<ProjectPath>,
833 cx: &mut ModelContext<Self>,
834 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
835 let task = self.open_buffer(path, cx);
836 cx.spawn_weak(|_, cx| async move {
837 let buffer = task.await?;
838 let project_entry_id = buffer
839 .read_with(&cx, |buffer, cx| {
840 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
841 })
842 .ok_or_else(|| anyhow!("no project entry"))?;
843 Ok((project_entry_id, buffer.into()))
844 })
845 }
846
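    /// Opens (or returns an already-open) buffer for the given path. Concurrent
    /// requests for the same path share a single load via `loading_buffers`, so each
    /// buffer is only loaded once.
    ///
    /// Hypothetical usage sketch (assumes `worktree_id` and `cx` are in scope):
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let open_buffer = project.update(cx, |project, cx| project.open_buffer(path, cx));
    /// ```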
847 pub fn open_buffer(
848 &mut self,
849 path: impl Into<ProjectPath>,
850 cx: &mut ModelContext<Self>,
851 ) -> Task<Result<ModelHandle<Buffer>>> {
852 let project_path = path.into();
853 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
854 worktree
855 } else {
856 return Task::ready(Err(anyhow!("no such worktree")));
857 };
858
859 // If there is already a buffer for the given path, then return it.
860 let existing_buffer = self.get_open_buffer(&project_path, cx);
861 if let Some(existing_buffer) = existing_buffer {
862 return Task::ready(Ok(existing_buffer));
863 }
864
865 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
866 // If the given path is already being loaded, then wait for that existing
867 // task to complete and return the same buffer.
868 hash_map::Entry::Occupied(e) => e.get().clone(),
869
870 // Otherwise, record the fact that this path is now being loaded.
871 hash_map::Entry::Vacant(entry) => {
872 let (mut tx, rx) = postage::watch::channel();
873 entry.insert(rx.clone());
874
875 let load_buffer = if worktree.read(cx).is_local() {
876 self.open_local_buffer(&project_path.path, &worktree, cx)
877 } else {
878 self.open_remote_buffer(&project_path.path, &worktree, cx)
879 };
880
881 cx.spawn(move |this, mut cx| async move {
882 let load_result = load_buffer.await;
883 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
884 // Record the fact that the buffer is no longer loading.
885 this.loading_buffers.remove(&project_path);
886 let buffer = load_result.map_err(Arc::new)?;
887 Ok(buffer)
888 }));
889 })
890 .detach();
891 rx
892 }
893 };
894
895 cx.foreground().spawn(async move {
896 loop {
897 if let Some(result) = loading_watch.borrow().as_ref() {
898 match result {
899 Ok(buffer) => return Ok(buffer.clone()),
900 Err(error) => return Err(anyhow!("{}", error)),
901 }
902 }
903 loading_watch.next().await;
904 }
905 })
906 }
907
908 fn open_local_buffer(
909 &mut self,
910 path: &Arc<Path>,
911 worktree: &ModelHandle<Worktree>,
912 cx: &mut ModelContext<Self>,
913 ) -> Task<Result<ModelHandle<Buffer>>> {
914 let load_buffer = worktree.update(cx, |worktree, cx| {
915 let worktree = worktree.as_local_mut().unwrap();
916 worktree.load_buffer(path, cx)
917 });
918 cx.spawn(|this, mut cx| async move {
919 let buffer = load_buffer.await?;
920 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
921 Ok(buffer)
922 })
923 }
924
925 fn open_remote_buffer(
926 &mut self,
927 path: &Arc<Path>,
928 worktree: &ModelHandle<Worktree>,
929 cx: &mut ModelContext<Self>,
930 ) -> Task<Result<ModelHandle<Buffer>>> {
931 let rpc = self.client.clone();
932 let project_id = self.remote_id().unwrap();
933 let remote_worktree_id = worktree.read(cx).id();
934 let path = path.clone();
935 let path_string = path.to_string_lossy().to_string();
936 cx.spawn(|this, mut cx| async move {
937 let response = rpc
938 .request(proto::OpenBufferByPath {
939 project_id,
940 worktree_id: remote_worktree_id.to_proto(),
941 path: path_string,
942 })
943 .await?;
944 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
945 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
946 .await
947 })
948 }
949
950 fn open_local_buffer_via_lsp(
951 &mut self,
952 abs_path: lsp::Url,
953 lsp_adapter: Arc<dyn LspAdapter>,
954 lsp_server: Arc<LanguageServer>,
955 cx: &mut ModelContext<Self>,
956 ) -> Task<Result<ModelHandle<Buffer>>> {
957 cx.spawn(|this, mut cx| async move {
958 let abs_path = abs_path
959 .to_file_path()
960 .map_err(|_| anyhow!("can't convert URI to path"))?;
961 let (worktree, relative_path) = if let Some(result) =
962 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
963 {
964 result
965 } else {
966 let worktree = this
967 .update(&mut cx, |this, cx| {
968 this.create_local_worktree(&abs_path, false, cx)
969 })
970 .await?;
971 this.update(&mut cx, |this, cx| {
972 this.language_servers.insert(
973 (worktree.read(cx).id(), lsp_adapter.name()),
974 (lsp_adapter, lsp_server),
975 );
976 });
977 (worktree, PathBuf::new())
978 };
979
980 let project_path = ProjectPath {
981 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
982 path: relative_path.into(),
983 };
984 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
985 .await
986 })
987 }
988
989 pub fn open_buffer_by_id(
990 &mut self,
991 id: u64,
992 cx: &mut ModelContext<Self>,
993 ) -> Task<Result<ModelHandle<Buffer>>> {
994 if let Some(buffer) = self.buffer_for_id(id, cx) {
995 Task::ready(Ok(buffer))
996 } else if self.is_local() {
997 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
998 } else if let Some(project_id) = self.remote_id() {
999 let request = self
1000 .client
1001 .request(proto::OpenBufferById { project_id, id });
1002 cx.spawn(|this, mut cx| async move {
1003 let buffer = request
1004 .await?
1005 .buffer
1006 .ok_or_else(|| anyhow!("invalid buffer"))?;
1007 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1008 .await
1009 })
1010 } else {
1011 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1012 }
1013 }
1014
1015 pub fn save_buffer_as(
1016 &mut self,
1017 buffer: ModelHandle<Buffer>,
1018 abs_path: PathBuf,
1019 cx: &mut ModelContext<Project>,
1020 ) -> Task<Result<()>> {
1021 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1022 cx.spawn(|this, mut cx| async move {
1023 let (worktree, path) = worktree_task.await?;
1024 worktree
1025 .update(&mut cx, |worktree, cx| {
1026 worktree
1027 .as_local_mut()
1028 .unwrap()
1029 .save_buffer_as(buffer.clone(), path, cx)
1030 })
1031 .await?;
1032 this.update(&mut cx, |this, cx| {
1033 this.assign_language_to_buffer(&buffer, cx);
1034 this.register_buffer_with_language_server(&buffer, cx);
1035 });
1036 Ok(())
1037 })
1038 }
1039
1040 pub fn get_open_buffer(
1041 &mut self,
1042 path: &ProjectPath,
1043 cx: &mut ModelContext<Self>,
1044 ) -> Option<ModelHandle<Buffer>> {
1045 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1046 self.opened_buffers.values().find_map(|buffer| {
1047 let buffer = buffer.upgrade(cx)?;
1048 let file = File::from_dyn(buffer.read(cx).file())?;
1049 if file.worktree == worktree && file.path() == &path.path {
1050 Some(buffer)
1051 } else {
1052 None
1053 }
1054 })
1055 }
1056
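    /// Records a newly opened buffer in `opened_buffers`, applying any operations
    /// that were queued while it was still loading, then assigns its language and
    /// registers it with the matching language server.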
1057 fn register_buffer(
1058 &mut self,
1059 buffer: &ModelHandle<Buffer>,
1060 cx: &mut ModelContext<Self>,
1061 ) -> Result<()> {
1062 let remote_id = buffer.read(cx).remote_id();
1063 let open_buffer = if self.is_remote() || self.is_shared() {
1064 OpenBuffer::Strong(buffer.clone())
1065 } else {
1066 OpenBuffer::Weak(buffer.downgrade())
1067 };
1068
1069 match self.opened_buffers.insert(remote_id, open_buffer) {
1070 None => {}
1071 Some(OpenBuffer::Loading(operations)) => {
1072 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1073 }
1074 Some(OpenBuffer::Weak(existing_handle)) => {
1075 if existing_handle.upgrade(cx).is_some() {
1076 Err(anyhow!(
1077 "already registered buffer with remote id {}",
1078 remote_id
1079 ))?
1080 }
1081 }
1082 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1083 "already registered buffer with remote id {}",
1084 remote_id
1085 ))?,
1086 }
1087 cx.subscribe(buffer, |this, buffer, event, cx| {
1088 this.on_buffer_event(buffer, event, cx);
1089 })
1090 .detach();
1091
1092 self.assign_language_to_buffer(buffer, cx);
1093 self.register_buffer_with_language_server(buffer, cx);
1094
1095 Ok(())
1096 }
1097
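    /// For a local buffer with a running language server, sends `textDocument/didOpen`,
    /// seeds the snapshot history used for incremental `didChange` events, applies any
    /// diagnostics already known for the path, and arranges for `didClose` to be sent
    /// when the buffer is released.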
1098 fn register_buffer_with_language_server(
1099 &mut self,
1100 buffer_handle: &ModelHandle<Buffer>,
1101 cx: &mut ModelContext<Self>,
1102 ) {
1103 let buffer = buffer_handle.read(cx);
1104 let buffer_id = buffer.remote_id();
1105 if let Some(file) = File::from_dyn(buffer.file()) {
1106 if file.is_local() {
1107 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1108 let initial_snapshot = buffer.text_snapshot();
1109 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1110
1111 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1112 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1113 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1114 .log_err();
1115 }
1116 }
1117
1118 if let Some((_, server)) = language_server {
1119 server
1120 .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    Default::default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            },
                        )
1131 .log_err();
1132 buffer_handle.update(cx, |buffer, cx| {
1133 buffer.set_completion_triggers(
1134 server
1135 .capabilities()
1136 .completion_provider
1137 .as_ref()
1138 .and_then(|provider| provider.trigger_characters.clone())
1139 .unwrap_or(Vec::new()),
1140 cx,
1141 )
1142 });
1143 self.buffer_snapshots
1144 .insert(buffer_id, vec![(0, initial_snapshot)]);
1145 }
1146
1147 cx.observe_release(buffer_handle, |this, buffer, cx| {
1148 if let Some(file) = File::from_dyn(buffer.file()) {
1149 if file.is_local() {
1150 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1151 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1152 server
1153 .notify::<lsp::notification::DidCloseTextDocument>(
1154 lsp::DidCloseTextDocumentParams {
1155 text_document: lsp::TextDocumentIdentifier::new(
1156 uri.clone(),
1157 ),
1158 },
1159 )
1160 .log_err();
1161 }
1162 }
1163 }
1164 })
1165 .detach();
1166 }
1167 }
1168 }
1169
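    /// Reacts to buffer events: forwards edit operations to collaborators over RPC,
    /// and translates edits and saves into LSP `didChange` / `didSave` notifications.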
1170 fn on_buffer_event(
1171 &mut self,
1172 buffer: ModelHandle<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut ModelContext<Self>,
1175 ) -> Option<()> {
1176 match event {
1177 BufferEvent::Operation(operation) => {
1178 let project_id = self.remote_id()?;
1179 let request = self.client.request(proto::UpdateBuffer {
1180 project_id,
1181 buffer_id: buffer.read(cx).remote_id(),
1182 operations: vec![language::proto::serialize_operation(&operation)],
1183 });
1184 cx.background().spawn(request).detach_and_log_err(cx);
1185 }
1186 BufferEvent::Edited { .. } => {
1187 let (_, language_server) = self
1188 .language_server_for_buffer(buffer.read(cx), cx)?
1189 .clone();
1190 let buffer = buffer.read(cx);
1191 let file = File::from_dyn(buffer.file())?;
1192 let abs_path = file.as_local()?.abs_path(cx);
1193 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1194 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1195 let (version, prev_snapshot) = buffer_snapshots.last()?;
1196 let next_snapshot = buffer.text_snapshot();
1197 let next_version = version + 1;
1198
1199 let content_changes = buffer
1200 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1201 .map(|edit| {
1202 let edit_start = edit.new.start.0;
1203 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1204 let new_text = next_snapshot
1205 .text_for_range(edit.new.start.1..edit.new.end.1)
1206 .collect();
1207 lsp::TextDocumentContentChangeEvent {
1208 range: Some(lsp::Range::new(
1209 point_to_lsp(edit_start),
1210 point_to_lsp(edit_end),
1211 )),
1212 range_length: None,
1213 text: new_text,
1214 }
1215 })
1216 .collect();
1217
1218 buffer_snapshots.push((next_version, next_snapshot));
1219
1220 language_server
1221 .notify::<lsp::notification::DidChangeTextDocument>(
1222 lsp::DidChangeTextDocumentParams {
1223 text_document: lsp::VersionedTextDocumentIdentifier::new(
1224 uri,
1225 next_version,
1226 ),
1227 content_changes,
1228 },
1229 )
1230 .log_err();
1231 }
1232 BufferEvent::Saved => {
1233 let file = File::from_dyn(buffer.read(cx).file())?;
1234 let worktree_id = file.worktree_id(cx);
1235 let abs_path = file.as_local()?.abs_path(cx);
1236 let text_document = lsp::TextDocumentIdentifier {
1237 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1238 };
1239
1240 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1241 server
1242 .notify::<lsp::notification::DidSaveTextDocument>(
1243 lsp::DidSaveTextDocumentParams {
1244 text_document: text_document.clone(),
1245 text: None,
1246 },
1247 )
1248 .log_err();
1249 }
1250 }
1251 _ => {}
1252 }
1253
1254 None
1255 }
1256
1257 fn language_servers_for_worktree(
1258 &self,
1259 worktree_id: WorktreeId,
1260 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1261 self.language_servers.iter().filter_map(
1262 move |((language_server_worktree_id, _), server)| {
1263 if *language_server_worktree_id == worktree_id {
1264 Some(server)
1265 } else {
1266 None
1267 }
1268 },
1269 )
1270 }
1271
1272 fn assign_language_to_buffer(
1273 &mut self,
1274 buffer: &ModelHandle<Buffer>,
1275 cx: &mut ModelContext<Self>,
1276 ) -> Option<()> {
1277 // If the buffer has a language, set it and start the language server if we haven't already.
1278 let full_path = buffer.read(cx).file()?.full_path(cx);
1279 let language = self.languages.select_language(&full_path)?;
1280 buffer.update(cx, |buffer, cx| {
1281 buffer.set_language(Some(language.clone()), cx);
1282 });
1283
1284 let file = File::from_dyn(buffer.read(cx).file())?;
1285 let worktree = file.worktree.read(cx).as_local()?;
1286 let worktree_id = worktree.id();
1287 let worktree_abs_path = worktree.abs_path().clone();
1288 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1289
1290 None
1291 }
1292
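    /// Starts the language server for the given language, at most once per
    /// (worktree, adapter) pair, wiring up its diagnostics, progress, configuration,
    /// and workspace-edit handlers, and announcing already-open buffers to it.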
1293 fn start_language_server(
1294 &mut self,
1295 worktree_id: WorktreeId,
1296 worktree_path: Arc<Path>,
1297 language: Arc<Language>,
1298 cx: &mut ModelContext<Self>,
1299 ) {
1300 let adapter = if let Some(adapter) = language.lsp_adapter() {
1301 adapter
1302 } else {
1303 return;
1304 };
1305 let key = (worktree_id, adapter.name());
1306 self.started_language_servers
1307 .entry(key.clone())
1308 .or_insert_with(|| {
1309 let server_id = post_inc(&mut self.next_language_server_id);
1310 let language_server = self.languages.start_language_server(
1311 server_id,
1312 language.clone(),
1313 worktree_path,
1314 self.client.http_client(),
1315 cx,
1316 );
1317 cx.spawn_weak(|this, mut cx| async move {
1318 let language_server = language_server?.await.log_err()?;
1319 let language_server = language_server
1320 .initialize(adapter.initialization_options())
1321 .await
1322 .log_err()?;
1323 let this = this.upgrade(&cx)?;
1324 let disk_based_diagnostics_progress_token =
1325 adapter.disk_based_diagnostics_progress_token();
1326
1327 language_server
1328 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1329 let this = this.downgrade();
1330 let adapter = adapter.clone();
1331 move |params, mut cx| {
1332 if let Some(this) = this.upgrade(&cx) {
1333 this.update(&mut cx, |this, cx| {
1334 this.on_lsp_diagnostics_published(
1335 server_id,
1336 params,
1337 &adapter,
1338 disk_based_diagnostics_progress_token,
1339 cx,
1340 );
1341 });
1342 }
1343 }
1344 })
1345 .detach();
1346
1347 language_server
1348 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1349 let settings = this
1350 .read_with(&cx, |this, _| this.language_server_settings.clone());
1351 move |params, _| {
1352 let settings = settings.lock().clone();
1353 async move {
1354 Ok(params
1355 .items
1356 .into_iter()
1357 .map(|item| {
1358 if let Some(section) = &item.section {
1359 settings
1360 .get(section)
1361 .cloned()
1362 .unwrap_or(serde_json::Value::Null)
1363 } else {
1364 settings.clone()
1365 }
1366 })
1367 .collect())
1368 }
1369 }
1370 })
1371 .detach();
1372
1373 language_server
1374 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1375 let this = this.downgrade();
1376 let adapter = adapter.clone();
1377 let language_server = language_server.clone();
1378 move |params, cx| {
1379 Self::on_lsp_workspace_edit(
1380 this,
1381 params,
1382 server_id,
1383 adapter.clone(),
1384 language_server.clone(),
1385 cx,
1386 )
1387 }
1388 })
1389 .detach();
1390
1391 language_server
1392 .on_notification::<lsp::notification::Progress, _>({
1393 let this = this.downgrade();
1394 move |params, mut cx| {
1395 if let Some(this) = this.upgrade(&cx) {
1396 this.update(&mut cx, |this, cx| {
1397 this.on_lsp_progress(
1398 params,
1399 server_id,
1400 disk_based_diagnostics_progress_token,
1401 cx,
1402 );
1403 });
1404 }
1405 }
1406 })
1407 .detach();
1408
1409 this.update(&mut cx, |this, cx| {
1410 this.language_servers
1411 .insert(key.clone(), (adapter, language_server.clone()));
1412 this.language_server_statuses.insert(
1413 server_id,
1414 LanguageServerStatus {
1415 name: language_server.name().to_string(),
1416 pending_work: Default::default(),
1417 pending_diagnostic_updates: 0,
1418 },
1419 );
1420 language_server
1421 .notify::<lsp::notification::DidChangeConfiguration>(
1422 lsp::DidChangeConfigurationParams {
1423 settings: this.language_server_settings.lock().clone(),
1424 },
1425 )
1426 .ok();
1427
1428 if let Some(project_id) = this.remote_id() {
1429 this.client
1430 .send(proto::StartLanguageServer {
1431 project_id,
1432 server: Some(proto::LanguageServer {
1433 id: server_id as u64,
1434 name: language_server.name().to_string(),
1435 }),
1436 })
1437 .log_err();
1438 }
1439
1440 // Tell the language server about every open buffer in the worktree that matches the language.
1441 for buffer in this.opened_buffers.values() {
1442 if let Some(buffer_handle) = buffer.upgrade(cx) {
1443 let buffer = buffer_handle.read(cx);
1444 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1445 file
1446 } else {
1447 continue;
1448 };
1449 let language = if let Some(language) = buffer.language() {
1450 language
1451 } else {
1452 continue;
1453 };
1454 if file.worktree.read(cx).id() != key.0
1455 || language.lsp_adapter().map(|a| a.name())
1456 != Some(key.1.clone())
1457 {
1458 continue;
1459 }
1460
1461 let file = file.as_local()?;
1462 let versions = this
1463 .buffer_snapshots
1464 .entry(buffer.remote_id())
1465 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1466 let (version, initial_snapshot) = versions.last().unwrap();
1467 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1468 language_server
1469 .notify::<lsp::notification::DidOpenTextDocument>(
1470 lsp::DidOpenTextDocumentParams {
1471 text_document: lsp::TextDocumentItem::new(
1472 uri,
1473 Default::default(),
1474 *version,
1475 initial_snapshot.text(),
1476 ),
1477 },
1478 )
1479 .log_err()?;
1480 buffer_handle.update(cx, |buffer, cx| {
1481 buffer.set_completion_triggers(
1482 language_server
1483 .capabilities()
1484 .completion_provider
1485 .as_ref()
1486 .and_then(|provider| {
1487 provider.trigger_characters.clone()
1488 })
1489 .unwrap_or(Vec::new()),
1490 cx,
1491 )
1492 });
1493 }
1494 }
1495
1496 cx.notify();
1497 Some(())
1498 });
1499
1500 Some(language_server)
1501 })
1502 });
1503 }
1504
1505 pub fn restart_language_servers_for_buffers(
1506 &mut self,
1507 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1508 cx: &mut ModelContext<Self>,
1509 ) -> Option<()> {
1510 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1511 .into_iter()
1512 .filter_map(|buffer| {
1513 let file = File::from_dyn(buffer.read(cx).file())?;
1514 let worktree = file.worktree.read(cx).as_local()?;
1515 let worktree_id = worktree.id();
1516 let worktree_abs_path = worktree.abs_path().clone();
1517 let full_path = file.full_path(cx);
1518 Some((worktree_id, worktree_abs_path, full_path))
1519 })
1520 .collect();
1521 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1522 let language = self.languages.select_language(&full_path)?;
1523 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1524 }
1525
1526 None
1527 }
1528
1529 fn restart_language_server(
1530 &mut self,
1531 worktree_id: WorktreeId,
1532 worktree_path: Arc<Path>,
1533 language: Arc<Language>,
1534 cx: &mut ModelContext<Self>,
1535 ) {
1536 let adapter = if let Some(adapter) = language.lsp_adapter() {
1537 adapter
1538 } else {
1539 return;
1540 };
1541 let key = (worktree_id, adapter.name());
1542 let server_to_shutdown = self.language_servers.remove(&key);
1543 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1547 cx.spawn_weak(|this, mut cx| async move {
1548 if let Some(this) = this.upgrade(&cx) {
1549 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1550 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1551 shutdown_task.await;
1552 }
1553 }
1554
1555 this.update(&mut cx, |this, cx| {
1556 this.start_language_server(worktree_id, worktree_path, language, cx);
1557 });
1558 }
1559 })
1560 .detach();
1561 }
1562
1563 fn on_lsp_diagnostics_published(
1564 &mut self,
1565 server_id: usize,
1566 mut params: lsp::PublishDiagnosticsParams,
1567 adapter: &Arc<dyn LspAdapter>,
1568 disk_based_diagnostics_progress_token: Option<&str>,
1569 cx: &mut ModelContext<Self>,
1570 ) {
1571 adapter.process_diagnostics(&mut params);
1572 if disk_based_diagnostics_progress_token.is_none() {
1573 self.disk_based_diagnostics_started(cx);
1574 self.broadcast_language_server_update(
1575 server_id,
1576 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1577 proto::LspDiskBasedDiagnosticsUpdating {},
1578 ),
1579 );
1580 }
1581 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1582 .log_err();
1583 if disk_based_diagnostics_progress_token.is_none() {
1584 self.disk_based_diagnostics_finished(cx);
1585 self.broadcast_language_server_update(
1586 server_id,
1587 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1588 proto::LspDiskBasedDiagnosticsUpdated {},
1589 ),
1590 );
1591 }
1592 }
1593
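    /// Handles `$/progress` notifications. The adapter's disk-based diagnostics token
    /// is tracked separately so that `DiskBasedDiagnostics*` events fire only around
    /// whole diagnostic passes; all other tokens are surfaced as pending work on the
    /// server's status.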
1594 fn on_lsp_progress(
1595 &mut self,
1596 progress: lsp::ProgressParams,
1597 server_id: usize,
1598 disk_based_diagnostics_progress_token: Option<&str>,
1599 cx: &mut ModelContext<Self>,
1600 ) {
1601 let token = match progress.token {
1602 lsp::NumberOrString::String(token) => token,
1603 lsp::NumberOrString::Number(token) => {
1604 log::info!("skipping numeric progress token {}", token);
1605 return;
1606 }
1607 };
1608
1609 match progress.value {
1610 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1611 lsp::WorkDoneProgress::Begin(_) => {
1612 let language_server_status =
1613 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1614 status
1615 } else {
1616 return;
1617 };
1618
1619 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1620 language_server_status.pending_diagnostic_updates += 1;
1621 if language_server_status.pending_diagnostic_updates == 1 {
1622 self.disk_based_diagnostics_started(cx);
1623 self.broadcast_language_server_update(
1624 server_id,
1625 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1626 proto::LspDiskBasedDiagnosticsUpdating {},
1627 ),
1628 );
1629 }
1630 } else {
1631 self.on_lsp_work_start(server_id, token.clone(), cx);
1632 self.broadcast_language_server_update(
1633 server_id,
1634 proto::update_language_server::Variant::WorkStart(
1635 proto::LspWorkStart { token },
1636 ),
1637 );
1638 }
1639 }
1640 lsp::WorkDoneProgress::Report(report) => {
1641 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1642 self.on_lsp_work_progress(
1643 server_id,
1644 token.clone(),
1645 LanguageServerProgress {
1646 message: report.message.clone(),
1647 percentage: report.percentage.map(|p| p as usize),
1648 last_update_at: Instant::now(),
1649 },
1650 cx,
1651 );
1652 self.broadcast_language_server_update(
1653 server_id,
1654 proto::update_language_server::Variant::WorkProgress(
1655 proto::LspWorkProgress {
1656 token,
1657 message: report.message,
1658 percentage: report.percentage.map(|p| p as u32),
1659 },
1660 ),
1661 );
1662 }
1663 }
1664 lsp::WorkDoneProgress::End(_) => {
1665 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1666 let language_server_status = if let Some(status) =
1667 self.language_server_statuses.get_mut(&server_id)
1668 {
1669 status
1670 } else {
1671 return;
1672 };
1673
1674 language_server_status.pending_diagnostic_updates -= 1;
1675 if language_server_status.pending_diagnostic_updates == 0 {
1676 self.disk_based_diagnostics_finished(cx);
1677 self.broadcast_language_server_update(
1678 server_id,
1679 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1680 proto::LspDiskBasedDiagnosticsUpdated {},
1681 ),
1682 );
1683 }
1684 } else {
1685 self.on_lsp_work_end(server_id, token.clone(), cx);
1686 self.broadcast_language_server_update(
1687 server_id,
1688 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1689 token,
1690 }),
1691 );
1692 }
1693 }
1694 },
1695 }
1696 }
1697
1698 fn on_lsp_work_start(
1699 &mut self,
1700 language_server_id: usize,
1701 token: String,
1702 cx: &mut ModelContext<Self>,
1703 ) {
1704 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1705 status.pending_work.insert(
1706 token,
1707 LanguageServerProgress {
1708 message: None,
1709 percentage: None,
1710 last_update_at: Instant::now(),
1711 },
1712 );
1713 cx.notify();
1714 }
1715 }
1716
1717 fn on_lsp_work_progress(
1718 &mut self,
1719 language_server_id: usize,
1720 token: String,
1721 progress: LanguageServerProgress,
1722 cx: &mut ModelContext<Self>,
1723 ) {
1724 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1725 status.pending_work.insert(token, progress);
1726 cx.notify();
1727 }
1728 }
1729
1730 fn on_lsp_work_end(
1731 &mut self,
1732 language_server_id: usize,
1733 token: String,
1734 cx: &mut ModelContext<Self>,
1735 ) {
1736 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1737 status.pending_work.remove(&token);
1738 cx.notify();
1739 }
1740 }
1741
1742 async fn on_lsp_workspace_edit(
1743 this: WeakModelHandle<Self>,
1744 params: lsp::ApplyWorkspaceEditParams,
1745 server_id: usize,
1746 adapter: Arc<dyn LspAdapter>,
1747 language_server: Arc<LanguageServer>,
1748 mut cx: AsyncAppContext,
1749 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1750 let this = this
1751 .upgrade(&cx)
1752 .ok_or_else(|| anyhow!("project project closed"))?;
1753 let transaction = Self::deserialize_workspace_edit(
1754 this.clone(),
1755 params.edit,
1756 true,
1757 adapter.clone(),
1758 language_server.clone(),
1759 &mut cx,
1760 )
1761 .await
1762 .log_err();
1763 this.update(&mut cx, |this, _| {
1764 if let Some(transaction) = transaction {
1765 this.last_workspace_edits_by_language_server
1766 .insert(server_id, transaction);
1767 }
1768 });
1769 Ok(lsp::ApplyWorkspaceEditResponse {
1770 applied: true,
1771 failed_change: None,
1772 failure_reason: None,
1773 })
1774 }
1775
1776 fn broadcast_language_server_update(
1777 &self,
1778 language_server_id: usize,
1779 event: proto::update_language_server::Variant,
1780 ) {
1781 if let Some(project_id) = self.remote_id() {
1782 self.client
1783 .send(proto::UpdateLanguageServer {
1784 project_id,
1785 language_server_id: language_server_id as u64,
1786 variant: Some(event),
1787 })
1788 .log_err();
1789 }
1790 }
1791
1792 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1793 for (_, server) in self.language_servers.values() {
1794 server
1795 .notify::<lsp::notification::DidChangeConfiguration>(
1796 lsp::DidChangeConfigurationParams {
1797 settings: settings.clone(),
1798 },
1799 )
1800 .ok();
1801 }
1802 *self.language_server_settings.lock() = settings;
1803 }
1804
1805 pub fn language_server_statuses(
1806 &self,
1807 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1808 self.language_server_statuses.values()
1809 }
1810
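    /// Converts an LSP `publishDiagnostics` notification into diagnostic entries,
    /// grouping each primary diagnostic with its related-information entries under a
    /// shared group id before handing them to the owning worktree and buffer.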
1811 pub fn update_diagnostics(
1812 &mut self,
1813 params: lsp::PublishDiagnosticsParams,
1814 disk_based_sources: &[&str],
1815 cx: &mut ModelContext<Self>,
1816 ) -> Result<()> {
1817 let abs_path = params
1818 .uri
1819 .to_file_path()
1820 .map_err(|_| anyhow!("URI is not a file"))?;
1821 let mut next_group_id = 0;
1822 let mut diagnostics = Vec::default();
1823 let mut primary_diagnostic_group_ids = HashMap::default();
1824 let mut sources_by_group_id = HashMap::default();
1825 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1827 let source = diagnostic.source.as_ref();
1828 let code = diagnostic.code.as_ref().map(|code| match code {
1829 lsp::NumberOrString::Number(code) => code.to_string(),
1830 lsp::NumberOrString::String(code) => code.clone(),
1831 });
1832 let range = range_from_lsp(diagnostic.range);
1833 let is_supporting = diagnostic
1834 .related_information
1835 .as_ref()
1836 .map_or(false, |infos| {
1837 infos.iter().any(|info| {
1838 primary_diagnostic_group_ids.contains_key(&(
1839 source,
1840 code.clone(),
1841 range_from_lsp(info.location.range),
1842 ))
1843 })
1844 });
1845
1846 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1847 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1848 });
1849
1850 if is_supporting {
1851 supporting_diagnostics.insert(
1852 (source, code.clone(), range),
1853 (diagnostic.severity, is_unnecessary),
1854 );
1855 } else {
1856 let group_id = post_inc(&mut next_group_id);
1857 let is_disk_based = source.map_or(false, |source| {
1858 disk_based_sources.contains(&source.as_str())
1859 });
1860
1861 sources_by_group_id.insert(group_id, source);
1862 primary_diagnostic_group_ids
1863 .insert((source, code.clone(), range.clone()), group_id);
1864
1865 diagnostics.push(DiagnosticEntry {
1866 range,
1867 diagnostic: Diagnostic {
1868 code: code.clone(),
1869 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1870 message: diagnostic.message.clone(),
1871 group_id,
1872 is_primary: true,
1873 is_valid: true,
1874 is_disk_based,
1875 is_unnecessary,
1876 },
1877 });
1878 if let Some(infos) = &diagnostic.related_information {
1879 for info in infos {
1880 if info.location.uri == params.uri && !info.message.is_empty() {
1881 let range = range_from_lsp(info.location.range);
1882 diagnostics.push(DiagnosticEntry {
1883 range,
1884 diagnostic: Diagnostic {
1885 code: code.clone(),
1886 severity: DiagnosticSeverity::INFORMATION,
1887 message: info.message.clone(),
1888 group_id,
1889 is_primary: false,
1890 is_valid: true,
1891 is_disk_based,
1892 is_unnecessary: false,
1893 },
1894 });
1895 }
1896 }
1897 }
1898 }
1899 }
1900
1901 for entry in &mut diagnostics {
1902 let diagnostic = &mut entry.diagnostic;
1903 if !diagnostic.is_primary {
1904 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1905 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1906 source,
1907 diagnostic.code.clone(),
1908 entry.range.clone(),
1909 )) {
1910 if let Some(severity) = severity {
1911 diagnostic.severity = severity;
1912 }
1913 diagnostic.is_unnecessary = is_unnecessary;
1914 }
1915 }
1916 }
1917
1918 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1919 Ok(())
1920 }
1921
1922 pub fn update_diagnostic_entries(
1923 &mut self,
1924 abs_path: PathBuf,
1925 version: Option<i32>,
1926 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1927 cx: &mut ModelContext<Project>,
1928 ) -> Result<(), anyhow::Error> {
1929 let (worktree, relative_path) = self
1930 .find_local_worktree(&abs_path, cx)
1931 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1932 if !worktree.read(cx).is_visible() {
1933 return Ok(());
1934 }
1935
1936 let project_path = ProjectPath {
1937 worktree_id: worktree.read(cx).id(),
1938 path: relative_path.into(),
1939 };
1940
1941 for buffer in self.opened_buffers.values() {
1942 if let Some(buffer) = buffer.upgrade(cx) {
1943 if buffer
1944 .read(cx)
1945 .file()
1946 .map_or(false, |file| *file.path() == project_path.path)
1947 {
1948 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1949 break;
1950 }
1951 }
1952 }
1953 worktree.update(cx, |worktree, cx| {
1954 worktree
1955 .as_local_mut()
1956 .ok_or_else(|| anyhow!("not a local worktree"))?
1957 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1958 })?;
1959 cx.emit(Event::DiagnosticsUpdated(project_path));
1960 Ok(())
1961 }
1962
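    /// Applies `diagnostics` to a single open buffer: entries are sorted, the ranges of
    /// disk-based diagnostics are translated through any unsaved edits, empty ranges are
    /// widened to cover at least one character, and the resulting `DiagnosticSet` is
    /// stored on the buffer.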
1963 fn update_buffer_diagnostics(
1964 &mut self,
1965 buffer: &ModelHandle<Buffer>,
1966 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1967 version: Option<i32>,
1968 cx: &mut ModelContext<Self>,
1969 ) -> Result<()> {
1970 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1971 Ordering::Equal
1972 .then_with(|| b.is_primary.cmp(&a.is_primary))
1973 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1974 .then_with(|| a.severity.cmp(&b.severity))
1975 .then_with(|| a.message.cmp(&b.message))
1976 }
1977
1978 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1979
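        // Order diagnostics by position, widest range first; for identical ranges,
        // primary diagnostics sort ahead of their supporting entries.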
1980 diagnostics.sort_unstable_by(|a, b| {
1981 Ordering::Equal
1982 .then_with(|| a.range.start.cmp(&b.range.start))
1983 .then_with(|| b.range.end.cmp(&a.range.end))
1984 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1985 });
1986
1987 let mut sanitized_diagnostics = Vec::new();
1988 let edits_since_save = Patch::new(
1989 snapshot
1990 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1991 .collect(),
1992 );
1993 for entry in diagnostics {
1994 let start;
1995 let end;
1996 if entry.diagnostic.is_disk_based {
1997 // Some diagnostics are based on files on disk instead of buffers'
1998 // current contents. Adjust these diagnostics' ranges to reflect
1999 // any unsaved edits.
2000 start = edits_since_save.old_to_new(entry.range.start);
2001 end = edits_since_save.old_to_new(entry.range.end);
2002 } else {
2003 start = entry.range.start;
2004 end = entry.range.end;
2005 }
2006
2007 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2008 ..snapshot.clip_point_utf16(end, Bias::Right);
2009
2010 // Expand empty ranges by one character
2011 if range.start == range.end {
2012 range.end.column += 1;
2013 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2014 if range.start == range.end && range.end.column > 0 {
2015 range.start.column -= 1;
2016 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2017 }
2018 }
2019
2020 sanitized_diagnostics.push(DiagnosticEntry {
2021 range,
2022 diagnostic: entry.diagnostic,
2023 });
2024 }
2025 drop(edits_since_save);
2026
2027 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2028 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2029 Ok(())
2030 }
2031
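    /// Reloads the given buffers from their contents on disk. Only dirty buffers are
    /// considered; local buffers are reloaded directly, while buffers owned by a remote
    /// project are reloaded via a `ReloadBuffers` request to the host. When
    /// `push_to_history` is false, the reload transactions are removed from the buffers'
    /// undo history.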
2032 pub fn reload_buffers(
2033 &self,
2034 buffers: HashSet<ModelHandle<Buffer>>,
2035 push_to_history: bool,
2036 cx: &mut ModelContext<Self>,
2037 ) -> Task<Result<ProjectTransaction>> {
2038 let mut local_buffers = Vec::new();
2039 let mut remote_buffers = None;
2040 for buffer_handle in buffers {
2041 let buffer = buffer_handle.read(cx);
2042 if buffer.is_dirty() {
2043 if let Some(file) = File::from_dyn(buffer.file()) {
2044 if file.is_local() {
2045 local_buffers.push(buffer_handle);
2046 } else {
2047 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2048 }
2049 }
2050 }
2051 }
2052
2053 let remote_buffers = self.remote_id().zip(remote_buffers);
2054 let client = self.client.clone();
2055
2056 cx.spawn(|this, mut cx| async move {
2057 let mut project_transaction = ProjectTransaction::default();
2058
2059 if let Some((project_id, remote_buffers)) = remote_buffers {
2060 let response = client
2061 .request(proto::ReloadBuffers {
2062 project_id,
2063 buffer_ids: remote_buffers
2064 .iter()
2065 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2066 .collect(),
2067 })
2068 .await?
2069 .transaction
2070 .ok_or_else(|| anyhow!("missing transaction"))?;
2071 project_transaction = this
2072 .update(&mut cx, |this, cx| {
2073 this.deserialize_project_transaction(response, push_to_history, cx)
2074 })
2075 .await?;
2076 }
2077
2078 for buffer in local_buffers {
2079 let transaction = buffer
2080 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2081 .await?;
2082 buffer.update(&mut cx, |buffer, cx| {
2083 if let Some(transaction) = transaction {
2084 if !push_to_history {
2085 buffer.forget_transaction(transaction.id);
2086 }
2087 project_transaction.0.insert(cx.handle(), transaction);
2088 }
2089 });
2090 }
2091
2092 Ok(project_transaction)
2093 })
2094 }
2095
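    /// Formats the given buffers. For local buffers this sends an LSP
    /// `textDocument/formatting` request, falling back to range formatting over the
    /// whole buffer when only that capability is advertised; for remote buffers it
    /// forwards a `FormatBuffers` request to the host. Each buffer's edits are applied
    /// as a single transaction.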
2096 pub fn format(
2097 &self,
2098 buffers: HashSet<ModelHandle<Buffer>>,
2099 push_to_history: bool,
2100 cx: &mut ModelContext<Project>,
2101 ) -> Task<Result<ProjectTransaction>> {
2102 let mut local_buffers = Vec::new();
2103 let mut remote_buffers = None;
2104 for buffer_handle in buffers {
2105 let buffer = buffer_handle.read(cx);
2106 if let Some(file) = File::from_dyn(buffer.file()) {
2107 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2108 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2109 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2110 }
2111 } else {
2112 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2113 }
2114 } else {
2115 return Task::ready(Ok(Default::default()));
2116 }
2117 }
2118
2119 let remote_buffers = self.remote_id().zip(remote_buffers);
2120 let client = self.client.clone();
2121
2122 cx.spawn(|this, mut cx| async move {
2123 let mut project_transaction = ProjectTransaction::default();
2124
2125 if let Some((project_id, remote_buffers)) = remote_buffers {
2126 let response = client
2127 .request(proto::FormatBuffers {
2128 project_id,
2129 buffer_ids: remote_buffers
2130 .iter()
2131 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2132 .collect(),
2133 })
2134 .await?
2135 .transaction
2136 .ok_or_else(|| anyhow!("missing transaction"))?;
2137 project_transaction = this
2138 .update(&mut cx, |this, cx| {
2139 this.deserialize_project_transaction(response, push_to_history, cx)
2140 })
2141 .await?;
2142 }
2143
2144 for (buffer, buffer_abs_path, language_server) in local_buffers {
2145 let text_document = lsp::TextDocumentIdentifier::new(
2146 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2147 );
2148 let capabilities = &language_server.capabilities();
2149 let lsp_edits = if capabilities
2150 .document_formatting_provider
2151 .as_ref()
2152 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2153 {
2154 language_server
2155 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2156 text_document,
2157 options: lsp::FormattingOptions {
2158 tab_size: 4,
2159 insert_spaces: true,
2160 insert_final_newline: Some(true),
2161 ..Default::default()
2162 },
2163 work_done_progress_params: Default::default(),
2164 })
2165 .await?
2166 } else if capabilities
2167 .document_range_formatting_provider
2168 .as_ref()
2169 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2170 {
2171 let buffer_start = lsp::Position::new(0, 0);
2172 let buffer_end =
2173 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2174 language_server
2175 .request::<lsp::request::RangeFormatting>(
2176 lsp::DocumentRangeFormattingParams {
2177 text_document,
2178 range: lsp::Range::new(buffer_start, buffer_end),
2179 options: lsp::FormattingOptions {
2180 tab_size: 4,
2181 insert_spaces: true,
2182 insert_final_newline: Some(true),
2183 ..Default::default()
2184 },
2185 work_done_progress_params: Default::default(),
2186 },
2187 )
2188 .await?
2189 } else {
2190 continue;
2191 };
2192
2193 if let Some(lsp_edits) = lsp_edits {
2194 let edits = this
2195 .update(&mut cx, |this, cx| {
2196 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2197 })
2198 .await?;
2199 buffer.update(&mut cx, |buffer, cx| {
2200 buffer.finalize_last_transaction();
2201 buffer.start_transaction();
2202 for (range, text) in edits {
2203 buffer.edit([range], text, cx);
2204 }
2205 if buffer.end_transaction(cx).is_some() {
2206 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2207 if !push_to_history {
2208 buffer.forget_transaction(transaction.id);
2209 }
2210 project_transaction.0.insert(cx.handle(), transaction);
2211 }
2212 });
2213 }
2214 }
2215
2216 Ok(project_transaction)
2217 })
2218 }
2219
2220 pub fn definition<T: ToPointUtf16>(
2221 &self,
2222 buffer: &ModelHandle<Buffer>,
2223 position: T,
2224 cx: &mut ModelContext<Self>,
2225 ) -> Task<Result<Vec<Location>>> {
2226 let position = position.to_point_utf16(buffer.read(cx));
2227 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2228 }
2229
2230 pub fn references<T: ToPointUtf16>(
2231 &self,
2232 buffer: &ModelHandle<Buffer>,
2233 position: T,
2234 cx: &mut ModelContext<Self>,
2235 ) -> Task<Result<Vec<Location>>> {
2236 let position = position.to_point_utf16(buffer.read(cx));
2237 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2238 }
2239
2240 pub fn document_highlights<T: ToPointUtf16>(
2241 &self,
2242 buffer: &ModelHandle<Buffer>,
2243 position: T,
2244 cx: &mut ModelContext<Self>,
2245 ) -> Task<Result<Vec<DocumentHighlight>>> {
2246 let position = position.to_point_utf16(buffer.read(cx));
2247
2248 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2249 }
2250
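    /// Searches the running language servers for workspace symbols matching `query`.
    /// Locally, one `workspace/symbol` request is sent per language server and each
    /// result is resolved to a project path; remotely, the query is forwarded to the
    /// host via `GetProjectSymbols`.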
2251 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2252 if self.is_local() {
2253 let mut language_servers = HashMap::default();
2254 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2255 if let Some(worktree) = self
2256 .worktree_for_id(*worktree_id, cx)
2257 .and_then(|worktree| worktree.read(cx).as_local())
2258 {
2259 language_servers
2260 .entry(Arc::as_ptr(language_server))
2261 .or_insert((
2262 lsp_adapter.clone(),
2263 language_server.clone(),
2264 *worktree_id,
2265 worktree.abs_path().clone(),
2266 ));
2267 }
2268 }
2269
2270 let mut requests = Vec::new();
2271 for (_, language_server, _, _) in language_servers.values() {
2272 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2273 lsp::WorkspaceSymbolParams {
2274 query: query.to_string(),
2275 ..Default::default()
2276 },
2277 ));
2278 }
2279
2280 cx.spawn_weak(|this, cx| async move {
2281 let responses = futures::future::try_join_all(requests).await?;
2282
2283 let mut symbols = Vec::new();
2284 if let Some(this) = this.upgrade(&cx) {
2285 this.read_with(&cx, |this, cx| {
2286 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2287 language_servers.into_values().zip(responses)
2288 {
2289 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2290 |lsp_symbol| {
2291 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2292 let mut worktree_id = source_worktree_id;
2293 let path;
2294 if let Some((worktree, rel_path)) =
2295 this.find_local_worktree(&abs_path, cx)
2296 {
2297 worktree_id = worktree.read(cx).id();
2298 path = rel_path;
2299 } else {
2300 path = relativize_path(&worktree_abs_path, &abs_path);
2301 }
2302
2303 let label = this
2304 .languages
2305 .select_language(&path)
2306 .and_then(|language| {
2307 language
2308 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2309 })
2310 .unwrap_or_else(|| {
2311 CodeLabel::plain(lsp_symbol.name.clone(), None)
2312 });
2313 let signature = this.symbol_signature(worktree_id, &path);
2314
2315 Some(Symbol {
2316 source_worktree_id,
2317 worktree_id,
2318 language_server_name: adapter.name(),
2319 name: lsp_symbol.name,
2320 kind: lsp_symbol.kind,
2321 label,
2322 path,
2323 range: range_from_lsp(lsp_symbol.location.range),
2324 signature,
2325 })
2326 },
2327 ));
2328 }
2329 })
2330 }
2331
2332 Ok(symbols)
2333 })
2334 } else if let Some(project_id) = self.remote_id() {
2335 let request = self.client.request(proto::GetProjectSymbols {
2336 project_id,
2337 query: query.to_string(),
2338 });
2339 cx.spawn_weak(|this, cx| async move {
2340 let response = request.await?;
2341 let mut symbols = Vec::new();
2342 if let Some(this) = this.upgrade(&cx) {
2343 this.read_with(&cx, |this, _| {
2344 symbols.extend(
2345 response
2346 .symbols
2347 .into_iter()
2348 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2349 );
2350 })
2351 }
2352 Ok(symbols)
2353 })
2354 } else {
2355 Task::ready(Ok(Default::default()))
2356 }
2357 }
2358
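    /// Opens the buffer containing the given symbol, using the language server that
    /// reported it when the project is local, or asking the host to open it when the
    /// project is remote.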
2359 pub fn open_buffer_for_symbol(
2360 &mut self,
2361 symbol: &Symbol,
2362 cx: &mut ModelContext<Self>,
2363 ) -> Task<Result<ModelHandle<Buffer>>> {
2364 if self.is_local() {
2365 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2366 symbol.source_worktree_id,
2367 symbol.language_server_name.clone(),
2368 )) {
2369 server.clone()
2370 } else {
2371 return Task::ready(Err(anyhow!(
2372 "language server for worktree and language not found"
2373 )));
2374 };
2375
2376 let worktree_abs_path = if let Some(worktree_abs_path) = self
2377 .worktree_for_id(symbol.worktree_id, cx)
2378 .and_then(|worktree| worktree.read(cx).as_local())
2379 .map(|local_worktree| local_worktree.abs_path())
2380 {
2381 worktree_abs_path
2382 } else {
2383 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2384 };
2385 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2386 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2387 uri
2388 } else {
2389 return Task::ready(Err(anyhow!("invalid symbol path")));
2390 };
2391
2392 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2393 } else if let Some(project_id) = self.remote_id() {
2394 let request = self.client.request(proto::OpenBufferForSymbol {
2395 project_id,
2396 symbol: Some(serialize_symbol(symbol)),
2397 });
2398 cx.spawn(|this, mut cx| async move {
2399 let response = request.await?;
2400 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2401 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2402 .await
2403 })
2404 } else {
2405 Task::ready(Err(anyhow!("project does not have a remote id")))
2406 }
2407 }
2408
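    /// Requests completions at the given position. For local buffers, the language
    /// server's `textDocument/completion` response is converted into anchored
    /// `Completion` values, skipping completions whose edit ranges fall outside the
    /// buffer; for remote buffers, the request is forwarded to the host.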
2409 pub fn completions<T: ToPointUtf16>(
2410 &self,
2411 source_buffer_handle: &ModelHandle<Buffer>,
2412 position: T,
2413 cx: &mut ModelContext<Self>,
2414 ) -> Task<Result<Vec<Completion>>> {
2415 let source_buffer_handle = source_buffer_handle.clone();
2416 let source_buffer = source_buffer_handle.read(cx);
2417 let buffer_id = source_buffer.remote_id();
2418 let language = source_buffer.language().cloned();
2419 let worktree;
2420 let buffer_abs_path;
2421 if let Some(file) = File::from_dyn(source_buffer.file()) {
2422 worktree = file.worktree.clone();
2423 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2424 } else {
2425 return Task::ready(Ok(Default::default()));
2426 };
2427
2428 let position = position.to_point_utf16(source_buffer);
2429 let anchor = source_buffer.anchor_after(position);
2430
2431 if worktree.read(cx).as_local().is_some() {
2432 let buffer_abs_path = buffer_abs_path.unwrap();
2433 let (_, lang_server) =
2434 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2435 server.clone()
2436 } else {
2437 return Task::ready(Ok(Default::default()));
2438 };
2439
2440 cx.spawn(|_, cx| async move {
2441 let completions = lang_server
2442 .request::<lsp::request::Completion>(lsp::CompletionParams {
2443 text_document_position: lsp::TextDocumentPositionParams::new(
2444 lsp::TextDocumentIdentifier::new(
2445 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2446 ),
2447 point_to_lsp(position),
2448 ),
2449 context: Default::default(),
2450 work_done_progress_params: Default::default(),
2451 partial_result_params: Default::default(),
2452 })
2453 .await
2454 .context("lsp completion request failed")?;
2455
2456 let completions = if let Some(completions) = completions {
2457 match completions {
2458 lsp::CompletionResponse::Array(completions) => completions,
2459 lsp::CompletionResponse::List(list) => list.items,
2460 }
2461 } else {
2462 Default::default()
2463 };
2464
2465 source_buffer_handle.read_with(&cx, |this, _| {
2466 Ok(completions
2467 .into_iter()
2468 .filter_map(|lsp_completion| {
2469 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2470 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2471 (range_from_lsp(edit.range), edit.new_text.clone())
2472 }
2473 None => {
2474 let clipped_position =
2475 this.clip_point_utf16(position, Bias::Left);
2476 if position != clipped_position {
2477 log::info!("completion out of expected range");
2478 return None;
2479 }
2480 (
2481 this.common_prefix_at(
2482 clipped_position,
2483 &lsp_completion.label,
2484 ),
2485 lsp_completion.label.clone(),
2486 )
2487 }
2488 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2489 log::info!("unsupported insert/replace completion");
2490 return None;
2491 }
2492 };
2493
2494 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2495 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2496 if clipped_start == old_range.start && clipped_end == old_range.end {
2497 Some(Completion {
2498 old_range: this.anchor_before(old_range.start)
2499 ..this.anchor_after(old_range.end),
2500 new_text,
2501 label: language
2502 .as_ref()
2503 .and_then(|l| l.label_for_completion(&lsp_completion))
2504 .unwrap_or_else(|| {
2505 CodeLabel::plain(
2506 lsp_completion.label.clone(),
2507 lsp_completion.filter_text.as_deref(),
2508 )
2509 }),
2510 lsp_completion,
2511 })
2512 } else {
2513 log::info!("completion out of expected range");
2514 None
2515 }
2516 })
2517 .collect())
2518 })
2519 })
2520 } else if let Some(project_id) = self.remote_id() {
2521 let rpc = self.client.clone();
2522 let message = proto::GetCompletions {
2523 project_id,
2524 buffer_id,
2525 position: Some(language::proto::serialize_anchor(&anchor)),
2526 version: serialize_version(&source_buffer.version()),
2527 };
2528 cx.spawn_weak(|_, mut cx| async move {
2529 let response = rpc.request(message).await?;
2530
2531 source_buffer_handle
2532 .update(&mut cx, |buffer, _| {
2533 buffer.wait_for_version(deserialize_version(response.version))
2534 })
2535 .await;
2536
2537 response
2538 .completions
2539 .into_iter()
2540 .map(|completion| {
2541 language::proto::deserialize_completion(completion, language.as_ref())
2542 })
2543 .collect()
2544 })
2545 } else {
2546 Task::ready(Ok(Default::default()))
2547 }
2548 }
2549
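    /// Resolves the given completion with its language server and applies any
    /// `additionalTextEdits` it returns as a single transaction, optionally pushing that
    /// transaction onto the buffer's undo history. Remote projects forward the request
    /// to the host.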
2550 pub fn apply_additional_edits_for_completion(
2551 &self,
2552 buffer_handle: ModelHandle<Buffer>,
2553 completion: Completion,
2554 push_to_history: bool,
2555 cx: &mut ModelContext<Self>,
2556 ) -> Task<Result<Option<Transaction>>> {
2557 let buffer = buffer_handle.read(cx);
2558 let buffer_id = buffer.remote_id();
2559
2560 if self.is_local() {
2561 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2562 {
2563 server.clone()
2564 } else {
2565 return Task::ready(Ok(Default::default()));
2566 };
2567
2568 cx.spawn(|this, mut cx| async move {
2569 let resolved_completion = lang_server
2570 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2571 .await?;
2572 if let Some(edits) = resolved_completion.additional_text_edits {
2573 let edits = this
2574 .update(&mut cx, |this, cx| {
2575 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2576 })
2577 .await?;
2578 buffer_handle.update(&mut cx, |buffer, cx| {
2579 buffer.finalize_last_transaction();
2580 buffer.start_transaction();
2581 for (range, text) in edits {
2582 buffer.edit([range], text, cx);
2583 }
2584 let transaction = if buffer.end_transaction(cx).is_some() {
2585 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2586 if !push_to_history {
2587 buffer.forget_transaction(transaction.id);
2588 }
2589 Some(transaction)
2590 } else {
2591 None
2592 };
2593 Ok(transaction)
2594 })
2595 } else {
2596 Ok(None)
2597 }
2598 })
2599 } else if let Some(project_id) = self.remote_id() {
2600 let client = self.client.clone();
2601 cx.spawn(|_, mut cx| async move {
2602 let response = client
2603 .request(proto::ApplyCompletionAdditionalEdits {
2604 project_id,
2605 buffer_id,
2606 completion: Some(language::proto::serialize_completion(&completion)),
2607 })
2608 .await?;
2609
2610 if let Some(transaction) = response.transaction {
2611 let transaction = language::proto::deserialize_transaction(transaction)?;
2612 buffer_handle
2613 .update(&mut cx, |buffer, _| {
2614 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2615 })
2616 .await;
2617 if push_to_history {
2618 buffer_handle.update(&mut cx, |buffer, _| {
2619 buffer.push_transaction(transaction.clone(), Instant::now());
2620 });
2621 }
2622 Ok(Some(transaction))
2623 } else {
2624 Ok(None)
2625 }
2626 })
2627 } else {
2628 Task::ready(Err(anyhow!("project does not have a remote id")))
2629 }
2630 }
2631
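    /// Requests code actions for the given range, passing the diagnostics that overlap
    /// it as context. Only quickfix, refactor, refactor-extract, and source actions are
    /// requested, and only responses that contain a full code action (rather than a bare
    /// command) are returned.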
2632 pub fn code_actions<T: Clone + ToOffset>(
2633 &self,
2634 buffer_handle: &ModelHandle<Buffer>,
2635 range: Range<T>,
2636 cx: &mut ModelContext<Self>,
2637 ) -> Task<Result<Vec<CodeAction>>> {
2638 let buffer_handle = buffer_handle.clone();
2639 let buffer = buffer_handle.read(cx);
2640 let snapshot = buffer.snapshot();
2641 let relevant_diagnostics = snapshot
2642 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2643 .map(|entry| entry.to_lsp_diagnostic_stub())
2644 .collect();
2645 let buffer_id = buffer.remote_id();
2646 let worktree;
2647 let buffer_abs_path;
2648 if let Some(file) = File::from_dyn(buffer.file()) {
2649 worktree = file.worktree.clone();
2650 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2651 } else {
2652 return Task::ready(Ok(Default::default()));
2653 };
2654 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2655
2656 if worktree.read(cx).as_local().is_some() {
2657 let buffer_abs_path = buffer_abs_path.unwrap();
2658 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2659 {
2660 server.clone()
2661 } else {
2662 return Task::ready(Ok(Default::default()));
2663 };
2664
2665 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2666 cx.foreground().spawn(async move {
2667                 if lang_server.capabilities().code_action_provider.is_none() {
2668 return Ok(Default::default());
2669 }
2670
2671 Ok(lang_server
2672 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2673 text_document: lsp::TextDocumentIdentifier::new(
2674 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2675 ),
2676 range: lsp_range,
2677 work_done_progress_params: Default::default(),
2678 partial_result_params: Default::default(),
2679 context: lsp::CodeActionContext {
2680 diagnostics: relevant_diagnostics,
2681 only: Some(vec![
2682 lsp::CodeActionKind::QUICKFIX,
2683 lsp::CodeActionKind::REFACTOR,
2684 lsp::CodeActionKind::REFACTOR_EXTRACT,
2685 lsp::CodeActionKind::SOURCE,
2686 ]),
2687 },
2688 })
2689 .await?
2690 .unwrap_or_default()
2691 .into_iter()
2692 .filter_map(|entry| {
2693 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2694 Some(CodeAction {
2695 range: range.clone(),
2696 lsp_action,
2697 })
2698 } else {
2699 None
2700 }
2701 })
2702 .collect())
2703 })
2704 } else if let Some(project_id) = self.remote_id() {
2705 let rpc = self.client.clone();
2706 let version = buffer.version();
2707 cx.spawn_weak(|_, mut cx| async move {
2708 let response = rpc
2709 .request(proto::GetCodeActions {
2710 project_id,
2711 buffer_id,
2712 start: Some(language::proto::serialize_anchor(&range.start)),
2713 end: Some(language::proto::serialize_anchor(&range.end)),
2714 version: serialize_version(&version),
2715 })
2716 .await?;
2717
2718 buffer_handle
2719 .update(&mut cx, |buffer, _| {
2720 buffer.wait_for_version(deserialize_version(response.version))
2721 })
2722 .await;
2723
2724 response
2725 .actions
2726 .into_iter()
2727 .map(language::proto::deserialize_code_action)
2728 .collect()
2729 })
2730 } else {
2731 Task::ready(Ok(Default::default()))
2732 }
2733 }
2734
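    /// Applies a code action. The action is refreshed first: if the server attached
    /// resolve data, its range is updated and `codeAction/resolve` is called; otherwise
    /// the actions for the range are re-requested and matched by title. A workspace edit
    /// is then applied locally, while a command-only action is run via
    /// `workspace/executeCommand` and any edits the server sends back while it runs are
    /// returned as the project transaction.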
2735 pub fn apply_code_action(
2736 &self,
2737 buffer_handle: ModelHandle<Buffer>,
2738 mut action: CodeAction,
2739 push_to_history: bool,
2740 cx: &mut ModelContext<Self>,
2741 ) -> Task<Result<ProjectTransaction>> {
2742 if self.is_local() {
2743 let buffer = buffer_handle.read(cx);
2744 let (lsp_adapter, lang_server) =
2745 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2746 server.clone()
2747 } else {
2748 return Task::ready(Ok(Default::default()));
2749 };
2750 let range = action.range.to_point_utf16(buffer);
2751
2752 cx.spawn(|this, mut cx| async move {
2753 if let Some(lsp_range) = action
2754 .lsp_action
2755 .data
2756 .as_mut()
2757 .and_then(|d| d.get_mut("codeActionParams"))
2758 .and_then(|d| d.get_mut("range"))
2759 {
2760 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2761 action.lsp_action = lang_server
2762 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2763 .await?;
2764 } else {
2765 let actions = this
2766 .update(&mut cx, |this, cx| {
2767 this.code_actions(&buffer_handle, action.range, cx)
2768 })
2769 .await?;
2770 action.lsp_action = actions
2771 .into_iter()
2772 .find(|a| a.lsp_action.title == action.lsp_action.title)
2773 .ok_or_else(|| anyhow!("code action is outdated"))?
2774 .lsp_action;
2775 }
2776
2777 if let Some(edit) = action.lsp_action.edit {
2778 Self::deserialize_workspace_edit(
2779 this,
2780 edit,
2781 push_to_history,
2782 lsp_adapter,
2783 lang_server,
2784 &mut cx,
2785 )
2786 .await
2787 } else if let Some(command) = action.lsp_action.command {
2788 this.update(&mut cx, |this, _| {
2789 this.last_workspace_edits_by_language_server
2790 .remove(&lang_server.server_id());
2791 });
2792 lang_server
2793 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2794 command: command.command,
2795 arguments: command.arguments.unwrap_or_default(),
2796 ..Default::default()
2797 })
2798 .await?;
2799 Ok(this.update(&mut cx, |this, _| {
2800 this.last_workspace_edits_by_language_server
2801 .remove(&lang_server.server_id())
2802 .unwrap_or_default()
2803 }))
2804 } else {
2805 Ok(ProjectTransaction::default())
2806 }
2807 })
2808 } else if let Some(project_id) = self.remote_id() {
2809 let client = self.client.clone();
2810 let request = proto::ApplyCodeAction {
2811 project_id,
2812 buffer_id: buffer_handle.read(cx).remote_id(),
2813 action: Some(language::proto::serialize_code_action(&action)),
2814 };
2815 cx.spawn(|this, mut cx| async move {
2816 let response = client
2817 .request(request)
2818 .await?
2819 .transaction
2820 .ok_or_else(|| anyhow!("missing transaction"))?;
2821 this.update(&mut cx, |this, cx| {
2822 this.deserialize_project_transaction(response, push_to_history, cx)
2823 })
2824 .await
2825 })
2826 } else {
2827 Task::ready(Err(anyhow!("project does not have a remote id")))
2828 }
2829 }
2830
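    /// Applies an LSP workspace edit to the project: resource operations (create,
    /// rename, delete) go through the project's `Fs`, and text document edits are
    /// applied to the corresponding buffers (opened via the language server if
    /// necessary), each as its own transaction.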
2831 async fn deserialize_workspace_edit(
2832 this: ModelHandle<Self>,
2833 edit: lsp::WorkspaceEdit,
2834 push_to_history: bool,
2835 lsp_adapter: Arc<dyn LspAdapter>,
2836 language_server: Arc<LanguageServer>,
2837 cx: &mut AsyncAppContext,
2838 ) -> Result<ProjectTransaction> {
2839 let fs = this.read_with(cx, |this, _| this.fs.clone());
2840 let mut operations = Vec::new();
2841 if let Some(document_changes) = edit.document_changes {
2842 match document_changes {
2843 lsp::DocumentChanges::Edits(edits) => {
2844 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2845 }
2846 lsp::DocumentChanges::Operations(ops) => operations = ops,
2847 }
2848 } else if let Some(changes) = edit.changes {
2849 operations.extend(changes.into_iter().map(|(uri, edits)| {
2850 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2851 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2852 uri,
2853 version: None,
2854 },
2855 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2856 })
2857 }));
2858 }
2859
2860 let mut project_transaction = ProjectTransaction::default();
2861 for operation in operations {
2862 match operation {
2863 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2864 let abs_path = op
2865 .uri
2866 .to_file_path()
2867 .map_err(|_| anyhow!("can't convert URI to path"))?;
2868
2869 if let Some(parent_path) = abs_path.parent() {
2870 fs.create_dir(parent_path).await?;
2871 }
2872 if abs_path.ends_with("/") {
2873 fs.create_dir(&abs_path).await?;
2874 } else {
2875 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2876 .await?;
2877 }
2878 }
2879 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2880 let source_abs_path = op
2881 .old_uri
2882 .to_file_path()
2883 .map_err(|_| anyhow!("can't convert URI to path"))?;
2884 let target_abs_path = op
2885 .new_uri
2886 .to_file_path()
2887 .map_err(|_| anyhow!("can't convert URI to path"))?;
2888 fs.rename(
2889 &source_abs_path,
2890 &target_abs_path,
2891 op.options.map(Into::into).unwrap_or_default(),
2892 )
2893 .await?;
2894 }
2895 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2896 let abs_path = op
2897 .uri
2898 .to_file_path()
2899 .map_err(|_| anyhow!("can't convert URI to path"))?;
2900 let options = op.options.map(Into::into).unwrap_or_default();
2901 if abs_path.ends_with("/") {
2902 fs.remove_dir(&abs_path, options).await?;
2903 } else {
2904 fs.remove_file(&abs_path, options).await?;
2905 }
2906 }
2907 lsp::DocumentChangeOperation::Edit(op) => {
2908 let buffer_to_edit = this
2909 .update(cx, |this, cx| {
2910 this.open_local_buffer_via_lsp(
2911 op.text_document.uri,
2912 lsp_adapter.clone(),
2913 language_server.clone(),
2914 cx,
2915 )
2916 })
2917 .await?;
2918
2919 let edits = this
2920 .update(cx, |this, cx| {
2921 let edits = op.edits.into_iter().map(|edit| match edit {
2922 lsp::OneOf::Left(edit) => edit,
2923 lsp::OneOf::Right(edit) => edit.text_edit,
2924 });
2925 this.edits_from_lsp(
2926 &buffer_to_edit,
2927 edits,
2928 op.text_document.version,
2929 cx,
2930 )
2931 })
2932 .await?;
2933
2934 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2935 buffer.finalize_last_transaction();
2936 buffer.start_transaction();
2937 for (range, text) in edits {
2938 buffer.edit([range], text, cx);
2939 }
2940 let transaction = if buffer.end_transaction(cx).is_some() {
2941 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2942 if !push_to_history {
2943 buffer.forget_transaction(transaction.id);
2944 }
2945 Some(transaction)
2946 } else {
2947 None
2948 };
2949
2950 transaction
2951 });
2952 if let Some(transaction) = transaction {
2953 project_transaction.0.insert(buffer_to_edit, transaction);
2954 }
2955 }
2956 }
2957 }
2958
2959 Ok(project_transaction)
2960 }
2961
2962 pub fn prepare_rename<T: ToPointUtf16>(
2963 &self,
2964 buffer: ModelHandle<Buffer>,
2965 position: T,
2966 cx: &mut ModelContext<Self>,
2967 ) -> Task<Result<Option<Range<Anchor>>>> {
2968 let position = position.to_point_utf16(buffer.read(cx));
2969 self.request_lsp(buffer, PrepareRename { position }, cx)
2970 }
2971
2972 pub fn perform_rename<T: ToPointUtf16>(
2973 &self,
2974 buffer: ModelHandle<Buffer>,
2975 position: T,
2976 new_name: String,
2977 push_to_history: bool,
2978 cx: &mut ModelContext<Self>,
2979 ) -> Task<Result<ProjectTransaction>> {
2980 let position = position.to_point_utf16(buffer.read(cx));
2981 self.request_lsp(
2982 buffer,
2983 PerformRename {
2984 position,
2985 new_name,
2986 push_to_history,
2987 },
2988 cx,
2989 )
2990 }
2991
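    /// Searches the project for `query`. Local searches fan out over background workers:
    /// candidate paths are matched against the query on disk, matching files are opened
    /// as buffers, and each buffer is then searched to produce anchor ranges. Remote
    /// searches are delegated to the host.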
2992 pub fn search(
2993 &self,
2994 query: SearchQuery,
2995 cx: &mut ModelContext<Self>,
2996 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2997 if self.is_local() {
2998 let snapshots = self
2999 .visible_worktrees(cx)
3000 .filter_map(|tree| {
3001 let tree = tree.read(cx).as_local()?;
3002 Some(tree.snapshot())
3003 })
3004 .collect::<Vec<_>>();
3005
3006 let background = cx.background().clone();
3007 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3008 if path_count == 0 {
3009 return Task::ready(Ok(Default::default()));
3010 }
3011 let workers = background.num_cpus().min(path_count);
3012 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3013 cx.background()
3014 .spawn({
3015 let fs = self.fs.clone();
3016 let background = cx.background().clone();
3017 let query = query.clone();
3018 async move {
3019 let fs = &fs;
3020 let query = &query;
3021 let matching_paths_tx = &matching_paths_tx;
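                    // Split the project's visible files evenly across the workers
                    // (rounding up), with each worker scanning only its slice of the
                    // concatenated worktree snapshots.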
3022 let paths_per_worker = (path_count + workers - 1) / workers;
3023 let snapshots = &snapshots;
3024 background
3025 .scoped(|scope| {
3026 for worker_ix in 0..workers {
3027 let worker_start_ix = worker_ix * paths_per_worker;
3028 let worker_end_ix = worker_start_ix + paths_per_worker;
3029 scope.spawn(async move {
3030 let mut snapshot_start_ix = 0;
3031 let mut abs_path = PathBuf::new();
3032 for snapshot in snapshots {
3033 let snapshot_end_ix =
3034 snapshot_start_ix + snapshot.visible_file_count();
3035 if worker_end_ix <= snapshot_start_ix {
3036 break;
3037 } else if worker_start_ix > snapshot_end_ix {
3038 snapshot_start_ix = snapshot_end_ix;
3039 continue;
3040 } else {
3041 let start_in_snapshot = worker_start_ix
3042 .saturating_sub(snapshot_start_ix);
3043 let end_in_snapshot =
3044 cmp::min(worker_end_ix, snapshot_end_ix)
3045 - snapshot_start_ix;
3046
3047 for entry in snapshot
3048 .files(false, start_in_snapshot)
3049 .take(end_in_snapshot - start_in_snapshot)
3050 {
3051 if matching_paths_tx.is_closed() {
3052 break;
3053 }
3054
3055 abs_path.clear();
3056 abs_path.push(&snapshot.abs_path());
3057 abs_path.push(&entry.path);
3058 let matches = if let Some(file) =
3059 fs.open_sync(&abs_path).await.log_err()
3060 {
3061 query.detect(file).unwrap_or(false)
3062 } else {
3063 false
3064 };
3065
3066 if matches {
3067 let project_path =
3068 (snapshot.id(), entry.path.clone());
3069 if matching_paths_tx
3070 .send(project_path)
3071 .await
3072 .is_err()
3073 {
3074 break;
3075 }
3076 }
3077 }
3078
3079 snapshot_start_ix = snapshot_end_ix;
3080 }
3081 }
3082 });
3083 }
3084 })
3085 .await;
3086 }
3087 })
3088 .detach();
3089
3090 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3091 let open_buffers = self
3092 .opened_buffers
3093 .values()
3094 .filter_map(|b| b.upgrade(cx))
3095 .collect::<HashSet<_>>();
3096 cx.spawn(|this, cx| async move {
3097 for buffer in &open_buffers {
3098 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3099 buffers_tx.send((buffer.clone(), snapshot)).await?;
3100 }
3101
3102 let open_buffers = Rc::new(RefCell::new(open_buffers));
3103 while let Some(project_path) = matching_paths_rx.next().await {
3104 if buffers_tx.is_closed() {
3105 break;
3106 }
3107
3108 let this = this.clone();
3109 let open_buffers = open_buffers.clone();
3110 let buffers_tx = buffers_tx.clone();
3111 cx.spawn(|mut cx| async move {
3112 if let Some(buffer) = this
3113 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3114 .await
3115 .log_err()
3116 {
3117 if open_buffers.borrow_mut().insert(buffer.clone()) {
3118 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3119 buffers_tx.send((buffer, snapshot)).await?;
3120 }
3121 }
3122
3123 Ok::<_, anyhow::Error>(())
3124 })
3125 .detach();
3126 }
3127
3128 Ok::<_, anyhow::Error>(())
3129 })
3130 .detach_and_log_err(cx);
3131
3132 let background = cx.background().clone();
3133 cx.background().spawn(async move {
3134 let query = &query;
3135 let mut matched_buffers = Vec::new();
3136 for _ in 0..workers {
3137 matched_buffers.push(HashMap::default());
3138 }
3139 background
3140 .scoped(|scope| {
3141 for worker_matched_buffers in matched_buffers.iter_mut() {
3142 let mut buffers_rx = buffers_rx.clone();
3143 scope.spawn(async move {
3144 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3145 let buffer_matches = query
3146 .search(snapshot.as_rope())
3147 .await
3148 .iter()
3149 .map(|range| {
3150 snapshot.anchor_before(range.start)
3151 ..snapshot.anchor_after(range.end)
3152 })
3153 .collect::<Vec<_>>();
3154 if !buffer_matches.is_empty() {
3155 worker_matched_buffers
3156 .insert(buffer.clone(), buffer_matches);
3157 }
3158 }
3159 });
3160 }
3161 })
3162 .await;
3163 Ok(matched_buffers.into_iter().flatten().collect())
3164 })
3165 } else if let Some(project_id) = self.remote_id() {
3166 let request = self.client.request(query.to_proto(project_id));
3167 cx.spawn(|this, mut cx| async move {
3168 let response = request.await?;
3169 let mut result = HashMap::default();
3170 for location in response.locations {
3171 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3172 let target_buffer = this
3173 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3174 .await?;
3175 let start = location
3176 .start
3177 .and_then(deserialize_anchor)
3178 .ok_or_else(|| anyhow!("missing target start"))?;
3179 let end = location
3180 .end
3181 .and_then(deserialize_anchor)
3182 .ok_or_else(|| anyhow!("missing target end"))?;
3183 result
3184 .entry(target_buffer)
3185 .or_insert(Vec::new())
3186 .push(start..end)
3187 }
3188 Ok(result)
3189 })
3190 } else {
3191 Task::ready(Ok(Default::default()))
3192 }
3193 }
3194
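    /// Issues a typed LSP request for the given buffer. Local buffers talk to their
    /// language server directly, returning a default response if the server lacks the
    /// required capability; remote buffers proxy the request through the host.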
3195 fn request_lsp<R: LspCommand>(
3196 &self,
3197 buffer_handle: ModelHandle<Buffer>,
3198 request: R,
3199 cx: &mut ModelContext<Self>,
3200 ) -> Task<Result<R::Response>>
3201 where
3202 <R::LspRequest as lsp::request::Request>::Result: Send,
3203 {
3204 let buffer = buffer_handle.read(cx);
3205 if self.is_local() {
3206 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3207 if let Some((file, (_, language_server))) =
3208 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3209 {
3210 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3211 return cx.spawn(|this, cx| async move {
3212 if !request.check_capabilities(&language_server.capabilities()) {
3213 return Ok(Default::default());
3214 }
3215
3216 let response = language_server
3217 .request::<R::LspRequest>(lsp_params)
3218 .await
3219 .context("lsp request failed")?;
3220 request
3221 .response_from_lsp(response, this, buffer_handle, cx)
3222 .await
3223 });
3224 }
3225 } else if let Some(project_id) = self.remote_id() {
3226 let rpc = self.client.clone();
3227 let message = request.to_proto(project_id, buffer);
3228 return cx.spawn(|this, cx| async move {
3229 let response = rpc.request(message).await?;
3230 request
3231 .response_from_proto(response, this, buffer_handle, cx)
3232 .await
3233 });
3234 }
3235 Task::ready(Ok(Default::default()))
3236 }
3237
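    /// Returns the local worktree containing `abs_path` along with the path relative to
    /// that worktree, creating a new worktree rooted at `abs_path` (with an empty
    /// relative path) if none exists.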
3238 pub fn find_or_create_local_worktree(
3239 &mut self,
3240 abs_path: impl AsRef<Path>,
3241 visible: bool,
3242 cx: &mut ModelContext<Self>,
3243 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3244 let abs_path = abs_path.as_ref();
3245 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3246 Task::ready(Ok((tree.clone(), relative_path.into())))
3247 } else {
3248 let worktree = self.create_local_worktree(abs_path, visible, cx);
3249 cx.foreground()
3250 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3251 }
3252 }
3253
3254 pub fn find_local_worktree(
3255 &self,
3256 abs_path: &Path,
3257 cx: &AppContext,
3258 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3259 for tree in self.worktrees(cx) {
3260 if let Some(relative_path) = tree
3261 .read(cx)
3262 .as_local()
3263 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3264 {
3265 return Some((tree.clone(), relative_path.into()));
3266 }
3267 }
3268 None
3269 }
3270
3271 pub fn is_shared(&self) -> bool {
3272 match &self.client_state {
3273 ProjectClientState::Local { is_shared, .. } => *is_shared,
3274 ProjectClientState::Remote { .. } => false,
3275 }
3276 }
3277
3278 fn create_local_worktree(
3279 &mut self,
3280 abs_path: impl AsRef<Path>,
3281 visible: bool,
3282 cx: &mut ModelContext<Self>,
3283 ) -> Task<Result<ModelHandle<Worktree>>> {
3284 let fs = self.fs.clone();
3285 let client = self.client.clone();
3286 let next_entry_id = self.next_entry_id.clone();
3287 let path: Arc<Path> = abs_path.as_ref().into();
3288 let task = self
3289 .loading_local_worktrees
3290 .entry(path.clone())
3291 .or_insert_with(|| {
3292 cx.spawn(|project, mut cx| {
3293 async move {
3294 let worktree = Worktree::local(
3295 client.clone(),
3296 path.clone(),
3297 visible,
3298 fs,
3299 next_entry_id,
3300 &mut cx,
3301 )
3302 .await;
3303 project.update(&mut cx, |project, _| {
3304 project.loading_local_worktrees.remove(&path);
3305 });
3306 let worktree = worktree?;
3307
3308 let (remote_project_id, is_shared) =
3309 project.update(&mut cx, |project, cx| {
3310 project.add_worktree(&worktree, cx);
3311 (project.remote_id(), project.is_shared())
3312 });
3313
3314 if let Some(project_id) = remote_project_id {
3315 if is_shared {
3316 worktree
3317 .update(&mut cx, |worktree, cx| {
3318 worktree.as_local_mut().unwrap().share(project_id, cx)
3319 })
3320 .await?;
3321 } else {
3322 worktree
3323 .update(&mut cx, |worktree, cx| {
3324 worktree.as_local_mut().unwrap().register(project_id, cx)
3325 })
3326 .await?;
3327 }
3328 }
3329
3330 Ok(worktree)
3331 }
3332                 .map_err(Arc::new)
3333 })
3334 .shared()
3335 })
3336 .clone();
3337 cx.foreground().spawn(async move {
3338 match task.await {
3339 Ok(worktree) => Ok(worktree),
3340 Err(err) => Err(anyhow!("{}", err)),
3341 }
3342 })
3343 }
3344
3345 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3346 self.worktrees.retain(|worktree| {
3347 worktree
3348 .upgrade(cx)
3349 .map_or(false, |w| w.read(cx).id() != id)
3350 });
3351 cx.notify();
3352 }
3353
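    /// Registers a worktree with the project, holding it strongly when the project is
    /// shared or the worktree is visible or remote, and only weakly otherwise so that
    /// unused hidden worktrees can be released.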
3354 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3355 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3356 if worktree.read(cx).is_local() {
3357 cx.subscribe(&worktree, |this, worktree, _, cx| {
3358 this.update_local_worktree_buffers(worktree, cx);
3359 })
3360 .detach();
3361 }
3362
3363 let push_strong_handle = {
3364 let worktree = worktree.read(cx);
3365 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3366 };
3367 if push_strong_handle {
3368 self.worktrees
3369 .push(WorktreeHandle::Strong(worktree.clone()));
3370 } else {
3371 cx.observe_release(&worktree, |this, _, cx| {
3372 this.worktrees
3373 .retain(|worktree| worktree.upgrade(cx).is_some());
3374 cx.notify();
3375 })
3376 .detach();
3377 self.worktrees
3378 .push(WorktreeHandle::Weak(worktree.downgrade()));
3379 }
3380 cx.notify();
3381 }
3382
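    /// Reconciles open buffers with a changed local worktree snapshot: each affected
    /// buffer receives an updated `File` (with no entry id if its entry no longer
    /// exists), collaborators are notified via `UpdateBufferFile`, and handles to
    /// dropped buffers are removed.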
3383 fn update_local_worktree_buffers(
3384 &mut self,
3385 worktree_handle: ModelHandle<Worktree>,
3386 cx: &mut ModelContext<Self>,
3387 ) {
3388 let snapshot = worktree_handle.read(cx).snapshot();
3389 let mut buffers_to_delete = Vec::new();
3390 for (buffer_id, buffer) in &self.opened_buffers {
3391 if let Some(buffer) = buffer.upgrade(cx) {
3392 buffer.update(cx, |buffer, cx| {
3393 if let Some(old_file) = File::from_dyn(buffer.file()) {
3394 if old_file.worktree != worktree_handle {
3395 return;
3396 }
3397
3398 let new_file = if let Some(entry) = old_file
3399 .entry_id
3400 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3401 {
3402 File {
3403 is_local: true,
3404 entry_id: Some(entry.id),
3405 mtime: entry.mtime,
3406 path: entry.path.clone(),
3407 worktree: worktree_handle.clone(),
3408 }
3409 } else if let Some(entry) =
3410 snapshot.entry_for_path(old_file.path().as_ref())
3411 {
3412 File {
3413 is_local: true,
3414 entry_id: Some(entry.id),
3415 mtime: entry.mtime,
3416 path: entry.path.clone(),
3417 worktree: worktree_handle.clone(),
3418 }
3419 } else {
3420 File {
3421 is_local: true,
3422 entry_id: None,
3423 path: old_file.path().clone(),
3424 mtime: old_file.mtime(),
3425 worktree: worktree_handle.clone(),
3426 }
3427 };
3428
3429 if let Some(project_id) = self.remote_id() {
3430 self.client
3431 .send(proto::UpdateBufferFile {
3432 project_id,
3433 buffer_id: *buffer_id as u64,
3434 file: Some(new_file.to_proto()),
3435 })
3436 .log_err();
3437 }
3438 buffer.file_updated(Box::new(new_file), cx).detach();
3439 }
3440 });
3441 } else {
3442 buffers_to_delete.push(*buffer_id);
3443 }
3444 }
3445
3446 for buffer_id in buffers_to_delete {
3447 self.opened_buffers.remove(&buffer_id);
3448 }
3449 }
3450
3451 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3452 let new_active_entry = entry.and_then(|project_path| {
3453 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3454 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3455 Some(entry.id)
3456 });
3457 if new_active_entry != self.active_entry {
3458 self.active_entry = new_active_entry;
3459 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3460 }
3461 }
3462
3463 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3464 self.language_servers_with_diagnostics_running > 0
3465 }
3466
3467 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3468 let mut summary = DiagnosticSummary::default();
3469 for (_, path_summary) in self.diagnostic_summaries(cx) {
3470 summary.error_count += path_summary.error_count;
3471 summary.warning_count += path_summary.warning_count;
3472 summary.info_count += path_summary.info_count;
3473 summary.hint_count += path_summary.hint_count;
3474 }
3475 summary
3476 }
3477
3478 pub fn diagnostic_summaries<'a>(
3479 &'a self,
3480 cx: &'a AppContext,
3481 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3482 self.worktrees(cx).flat_map(move |worktree| {
3483 let worktree = worktree.read(cx);
3484 let worktree_id = worktree.id();
3485 worktree
3486 .diagnostic_summaries()
3487 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3488 })
3489 }
3490
3491 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3492 self.language_servers_with_diagnostics_running += 1;
3493 if self.language_servers_with_diagnostics_running == 1 {
3494 cx.emit(Event::DiskBasedDiagnosticsStarted);
3495 }
3496 }
3497
3498 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3499 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3500 self.language_servers_with_diagnostics_running -= 1;
3501 if self.language_servers_with_diagnostics_running == 0 {
3502 cx.emit(Event::DiskBasedDiagnosticsFinished);
3503 }
3504 }
3505
3506 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3507 self.active_entry
3508 }
3509
3510 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3511 self.worktree_for_id(path.worktree_id, cx)?
3512 .read(cx)
3513 .entry_for_path(&path.path)
3514 .map(|entry| entry.id)
3515 }
3516
3517 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3518 let worktree = self.worktree_for_entry(entry_id, cx)?;
3519 let worktree = worktree.read(cx);
3520 let worktree_id = worktree.id();
3521 let path = worktree.entry_for_id(entry_id)?.path.clone();
3522 Some(ProjectPath { worktree_id, path })
3523 }
3524
3525 // RPC message handlers
3526
3527 async fn handle_unshare_project(
3528 this: ModelHandle<Self>,
3529 _: TypedEnvelope<proto::UnshareProject>,
3530 _: Arc<Client>,
3531 mut cx: AsyncAppContext,
3532 ) -> Result<()> {
3533 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3534 Ok(())
3535 }
3536
3537 async fn handle_add_collaborator(
3538 this: ModelHandle<Self>,
3539 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3540 _: Arc<Client>,
3541 mut cx: AsyncAppContext,
3542 ) -> Result<()> {
3543 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3544 let collaborator = envelope
3545 .payload
3546 .collaborator
3547 .take()
3548 .ok_or_else(|| anyhow!("empty collaborator"))?;
3549
3550 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3551 this.update(&mut cx, |this, cx| {
3552 this.collaborators
3553 .insert(collaborator.peer_id, collaborator);
3554 cx.notify();
3555 });
3556
3557 Ok(())
3558 }
3559
3560 async fn handle_remove_collaborator(
3561 this: ModelHandle<Self>,
3562 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3563 _: Arc<Client>,
3564 mut cx: AsyncAppContext,
3565 ) -> Result<()> {
3566 this.update(&mut cx, |this, cx| {
3567 let peer_id = PeerId(envelope.payload.peer_id);
3568 let replica_id = this
3569 .collaborators
3570 .remove(&peer_id)
3571 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3572 .replica_id;
3573 for (_, buffer) in &this.opened_buffers {
3574 if let Some(buffer) = buffer.upgrade(cx) {
3575 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3576 }
3577 }
3578 cx.emit(Event::CollaboratorLeft(peer_id));
3579 cx.notify();
3580 Ok(())
3581 })
3582 }
3583
3584 async fn handle_register_worktree(
3585 this: ModelHandle<Self>,
3586 envelope: TypedEnvelope<proto::RegisterWorktree>,
3587 client: Arc<Client>,
3588 mut cx: AsyncAppContext,
3589 ) -> Result<()> {
3590 this.update(&mut cx, |this, cx| {
3591 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3592 let replica_id = this.replica_id();
3593 let worktree = proto::Worktree {
3594 id: envelope.payload.worktree_id,
3595 root_name: envelope.payload.root_name,
3596 entries: Default::default(),
3597 diagnostic_summaries: Default::default(),
3598 visible: envelope.payload.visible,
3599 };
3600 let (worktree, load_task) =
3601 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3602 this.add_worktree(&worktree, cx);
3603 load_task.detach();
3604 Ok(())
3605 })
3606 }
3607
3608 async fn handle_unregister_worktree(
3609 this: ModelHandle<Self>,
3610 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3611 _: Arc<Client>,
3612 mut cx: AsyncAppContext,
3613 ) -> Result<()> {
3614 this.update(&mut cx, |this, cx| {
3615 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3616 this.remove_worktree(worktree_id, cx);
3617 Ok(())
3618 })
3619 }
3620
3621 async fn handle_update_worktree(
3622 this: ModelHandle<Self>,
3623 envelope: TypedEnvelope<proto::UpdateWorktree>,
3624 _: Arc<Client>,
3625 mut cx: AsyncAppContext,
3626 ) -> Result<()> {
3627 this.update(&mut cx, |this, cx| {
3628 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3629 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3630 worktree.update(cx, |worktree, _| {
3631 let worktree = worktree.as_remote_mut().unwrap();
3632 worktree.update_from_remote(envelope)
3633 })?;
3634 }
3635 Ok(())
3636 })
3637 }
3638
3639 async fn handle_update_diagnostic_summary(
3640 this: ModelHandle<Self>,
3641 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3642 _: Arc<Client>,
3643 mut cx: AsyncAppContext,
3644 ) -> Result<()> {
3645 this.update(&mut cx, |this, cx| {
3646 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3647 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3648 if let Some(summary) = envelope.payload.summary {
3649 let project_path = ProjectPath {
3650 worktree_id,
3651 path: Path::new(&summary.path).into(),
3652 };
3653 worktree.update(cx, |worktree, _| {
3654 worktree
3655 .as_remote_mut()
3656 .unwrap()
3657 .update_diagnostic_summary(project_path.path.clone(), &summary);
3658 });
3659 cx.emit(Event::DiagnosticsUpdated(project_path));
3660 }
3661 }
3662 Ok(())
3663 })
3664 }
3665
3666 async fn handle_start_language_server(
3667 this: ModelHandle<Self>,
3668 envelope: TypedEnvelope<proto::StartLanguageServer>,
3669 _: Arc<Client>,
3670 mut cx: AsyncAppContext,
3671 ) -> Result<()> {
3672 let server = envelope
3673 .payload
3674 .server
3675 .ok_or_else(|| anyhow!("invalid server"))?;
3676 this.update(&mut cx, |this, cx| {
3677 this.language_server_statuses.insert(
3678 server.id as usize,
3679 LanguageServerStatus {
3680 name: server.name,
3681 pending_work: Default::default(),
3682 pending_diagnostic_updates: 0,
3683 },
3684 );
3685 cx.notify();
3686 });
3687 Ok(())
3688 }
3689
3690 async fn handle_update_language_server(
3691 this: ModelHandle<Self>,
3692 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3693 _: Arc<Client>,
3694 mut cx: AsyncAppContext,
3695 ) -> Result<()> {
3696 let language_server_id = envelope.payload.language_server_id as usize;
3697 match envelope
3698 .payload
3699 .variant
3700 .ok_or_else(|| anyhow!("invalid variant"))?
3701 {
3702 proto::update_language_server::Variant::WorkStart(payload) => {
3703 this.update(&mut cx, |this, cx| {
3704 this.on_lsp_work_start(language_server_id, payload.token, cx);
3705 })
3706 }
3707 proto::update_language_server::Variant::WorkProgress(payload) => {
3708 this.update(&mut cx, |this, cx| {
3709 this.on_lsp_work_progress(
3710 language_server_id,
3711 payload.token,
3712 LanguageServerProgress {
3713 message: payload.message,
3714 percentage: payload.percentage.map(|p| p as usize),
3715 last_update_at: Instant::now(),
3716 },
3717 cx,
3718 );
3719 })
3720 }
3721 proto::update_language_server::Variant::WorkEnd(payload) => {
3722 this.update(&mut cx, |this, cx| {
3723 this.on_lsp_work_end(language_server_id, payload.token, cx);
3724 })
3725 }
3726 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3727 this.update(&mut cx, |this, cx| {
3728 this.disk_based_diagnostics_started(cx);
3729 })
3730 }
3731 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3732 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3733 }
3734 }
3735
3736 Ok(())
3737 }
3738
3739 async fn handle_update_buffer(
3740 this: ModelHandle<Self>,
3741 envelope: TypedEnvelope<proto::UpdateBuffer>,
3742 _: Arc<Client>,
3743 mut cx: AsyncAppContext,
3744 ) -> Result<()> {
3745 this.update(&mut cx, |this, cx| {
3746 let payload = envelope.payload.clone();
3747 let buffer_id = payload.buffer_id;
3748 let ops = payload
3749 .operations
3750 .into_iter()
3751                 .map(language::proto::deserialize_operation)
3752 .collect::<Result<Vec<_>, _>>()?;
3753 match this.opened_buffers.entry(buffer_id) {
3754 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3755 OpenBuffer::Strong(buffer) => {
3756 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3757 }
3758 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3759 OpenBuffer::Weak(_) => {}
3760 },
3761 hash_map::Entry::Vacant(e) => {
3762 e.insert(OpenBuffer::Loading(ops));
3763 }
3764 }
3765 Ok(())
3766 })
3767 }
3768
3769 async fn handle_update_buffer_file(
3770 this: ModelHandle<Self>,
3771 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3772 _: Arc<Client>,
3773 mut cx: AsyncAppContext,
3774 ) -> Result<()> {
3775 this.update(&mut cx, |this, cx| {
3776 let payload = envelope.payload.clone();
3777 let buffer_id = payload.buffer_id;
3778 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3779 let worktree = this
3780 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3781 .ok_or_else(|| anyhow!("no such worktree"))?;
3782 let file = File::from_proto(file, worktree.clone(), cx)?;
3783 let buffer = this
3784 .opened_buffers
3785 .get_mut(&buffer_id)
3786 .and_then(|b| b.upgrade(cx))
3787 .ok_or_else(|| anyhow!("no such buffer"))?;
3788 buffer.update(cx, |buffer, cx| {
3789 buffer.file_updated(Box::new(file), cx).detach();
3790 });
3791 Ok(())
3792 })
3793 }
3794
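    /// Handles a `SaveBuffer` request from a guest: waits for the host's copy of the
    /// buffer to reach the requested version, saves it, and replies with the saved
    /// version and mtime.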
3795 async fn handle_save_buffer(
3796 this: ModelHandle<Self>,
3797 envelope: TypedEnvelope<proto::SaveBuffer>,
3798 _: Arc<Client>,
3799 mut cx: AsyncAppContext,
3800 ) -> Result<proto::BufferSaved> {
3801 let buffer_id = envelope.payload.buffer_id;
3802 let requested_version = deserialize_version(envelope.payload.version);
3803
3804 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3805 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3806 let buffer = this
3807 .opened_buffers
3808 .get(&buffer_id)
3809 .map(|buffer| buffer.upgrade(cx).unwrap())
3810 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3811 Ok::<_, anyhow::Error>((project_id, buffer))
3812 })?;
3813 buffer
3814 .update(&mut cx, |buffer, _| {
3815 buffer.wait_for_version(requested_version)
3816 })
3817 .await;
3818
3819 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3820 Ok(proto::BufferSaved {
3821 project_id,
3822 buffer_id,
3823 version: serialize_version(&saved_version),
3824 mtime: Some(mtime.into()),
3825 })
3826 }
3827
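    /// Handles a `ReloadBuffers` request by reloading the given buffers from
    /// disk and returning the resulting project transaction, serialized for
    /// the requesting peer.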
3828 async fn handle_reload_buffers(
3829 this: ModelHandle<Self>,
3830 envelope: TypedEnvelope<proto::ReloadBuffers>,
3831 _: Arc<Client>,
3832 mut cx: AsyncAppContext,
3833 ) -> Result<proto::ReloadBuffersResponse> {
3834 let sender_id = envelope.original_sender_id()?;
3835 let reload = this.update(&mut cx, |this, cx| {
3836 let mut buffers = HashSet::default();
3837 for buffer_id in &envelope.payload.buffer_ids {
3838 buffers.insert(
3839 this.opened_buffers
3840 .get(buffer_id)
3841 .map(|buffer| buffer.upgrade(cx).unwrap())
3842 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3843 );
3844 }
3845 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3846 })?;
3847
3848 let project_transaction = reload.await?;
3849 let project_transaction = this.update(&mut cx, |this, cx| {
3850 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3851 });
3852 Ok(proto::ReloadBuffersResponse {
3853 transaction: Some(project_transaction),
3854 })
3855 }
3856
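    /// Handles a `FormatBuffers` request by formatting the given buffers and
    /// returning the resulting project transaction, serialized for the
    /// requesting peer.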
3857 async fn handle_format_buffers(
3858 this: ModelHandle<Self>,
3859 envelope: TypedEnvelope<proto::FormatBuffers>,
3860 _: Arc<Client>,
3861 mut cx: AsyncAppContext,
3862 ) -> Result<proto::FormatBuffersResponse> {
3863 let sender_id = envelope.original_sender_id()?;
3864 let format = this.update(&mut cx, |this, cx| {
3865 let mut buffers = HashSet::default();
3866 for buffer_id in &envelope.payload.buffer_ids {
3867 buffers.insert(
3868 this.opened_buffers
3869 .get(buffer_id)
3870 .map(|buffer| buffer.upgrade(cx).unwrap())
3871 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3872 );
3873 }
3874 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3875 })?;
3876
3877 let project_transaction = format.await?;
3878 let project_transaction = this.update(&mut cx, |this, cx| {
3879 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3880 });
3881 Ok(proto::FormatBuffersResponse {
3882 transaction: Some(project_transaction),
3883 })
3884 }
3885
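    /// Handles a `GetCompletions` request: waits for the buffer to reach the
    /// requested version, queries completions at the given position, and
    /// returns them along with the buffer version they were computed against.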
3886 async fn handle_get_completions(
3887 this: ModelHandle<Self>,
3888 envelope: TypedEnvelope<proto::GetCompletions>,
3889 _: Arc<Client>,
3890 mut cx: AsyncAppContext,
3891 ) -> Result<proto::GetCompletionsResponse> {
3892 let position = envelope
3893 .payload
3894 .position
3895 .and_then(language::proto::deserialize_anchor)
3896 .ok_or_else(|| anyhow!("invalid position"))?;
3897 let version = deserialize_version(envelope.payload.version);
3898 let buffer = this.read_with(&cx, |this, cx| {
3899 this.opened_buffers
3900 .get(&envelope.payload.buffer_id)
3901 .map(|buffer| buffer.upgrade(cx).unwrap())
3902 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3903 })?;
3904 buffer
3905 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3906 .await;
3907 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3908 let completions = this
3909 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3910 .await?;
3911
3912 Ok(proto::GetCompletionsResponse {
3913 completions: completions
3914 .iter()
3915 .map(language::proto::serialize_completion)
3916 .collect(),
3917 version: serialize_version(&version),
3918 })
3919 }
3920
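    /// Handles a request to apply a completion's additional text edits,
    /// returning the resulting buffer transaction, if any.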
3921 async fn handle_apply_additional_edits_for_completion(
3922 this: ModelHandle<Self>,
3923 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3924 _: Arc<Client>,
3925 mut cx: AsyncAppContext,
3926 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3927 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3928 let buffer = this
3929 .opened_buffers
3930 .get(&envelope.payload.buffer_id)
3931 .map(|buffer| buffer.upgrade(cx).unwrap())
3932 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3933 let language = buffer.read(cx).language();
3934 let completion = language::proto::deserialize_completion(
3935 envelope
3936 .payload
3937 .completion
3938 .ok_or_else(|| anyhow!("invalid completion"))?,
3939 language,
3940 )?;
3941 Ok::<_, anyhow::Error>(
3942 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3943 )
3944 })?;
3945
3946 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3947 transaction: apply_additional_edits
3948 .await?
3949 .as_ref()
3950 .map(language::proto::serialize_transaction),
3951 })
3952 }
3953
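    /// Handles a `GetCodeActions` request: waits for the buffer to reach the
    /// requested version, computes code actions for the given range, and
    /// returns them along with the buffer version they were computed against.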
3954 async fn handle_get_code_actions(
3955 this: ModelHandle<Self>,
3956 envelope: TypedEnvelope<proto::GetCodeActions>,
3957 _: Arc<Client>,
3958 mut cx: AsyncAppContext,
3959 ) -> Result<proto::GetCodeActionsResponse> {
3960 let start = envelope
3961 .payload
3962 .start
3963 .and_then(language::proto::deserialize_anchor)
3964 .ok_or_else(|| anyhow!("invalid start"))?;
3965 let end = envelope
3966 .payload
3967 .end
3968 .and_then(language::proto::deserialize_anchor)
3969 .ok_or_else(|| anyhow!("invalid end"))?;
3970 let buffer = this.update(&mut cx, |this, cx| {
3971 this.opened_buffers
3972 .get(&envelope.payload.buffer_id)
3973 .map(|buffer| buffer.upgrade(cx).unwrap())
3974 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3975 })?;
3976 buffer
3977 .update(&mut cx, |buffer, _| {
3978 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3979 })
3980 .await;
3981
3982 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3983 let code_actions = this.update(&mut cx, |this, cx| {
3984 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3985 })?;
3986
3987 Ok(proto::GetCodeActionsResponse {
3988 actions: code_actions
3989 .await?
3990 .iter()
3991 .map(language::proto::serialize_code_action)
3992 .collect(),
3993 version: serialize_version(&version),
3994 })
3995 }
3996
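    /// Handles an `ApplyCodeAction` request by applying the deserialized code
    /// action locally and returning the resulting project transaction,
    /// serialized for the requesting peer.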
3997 async fn handle_apply_code_action(
3998 this: ModelHandle<Self>,
3999 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4000 _: Arc<Client>,
4001 mut cx: AsyncAppContext,
4002 ) -> Result<proto::ApplyCodeActionResponse> {
4003 let sender_id = envelope.original_sender_id()?;
4004 let action = language::proto::deserialize_code_action(
4005 envelope
4006 .payload
4007 .action
4008 .ok_or_else(|| anyhow!("invalid action"))?,
4009 )?;
4010 let apply_code_action = this.update(&mut cx, |this, cx| {
4011 let buffer = this
4012 .opened_buffers
4013 .get(&envelope.payload.buffer_id)
4014 .map(|buffer| buffer.upgrade(cx).unwrap())
4015 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4016 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4017 })?;
4018
4019 let project_transaction = apply_code_action.await?;
4020 let project_transaction = this.update(&mut cx, |this, cx| {
4021 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4022 });
4023 Ok(proto::ApplyCodeActionResponse {
4024 transaction: Some(project_transaction),
4025 })
4026 }
4027
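    /// Generic handler for buffer-scoped LSP requests forwarded by a guest.
    /// The request is deserialized via `LspCommand::from_proto`, dispatched to
    /// the appropriate language server, and the response is converted back
    /// into its protobuf representation for the requesting peer.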
4028 async fn handle_lsp_command<T: LspCommand>(
4029 this: ModelHandle<Self>,
4030 envelope: TypedEnvelope<T::ProtoRequest>,
4031 _: Arc<Client>,
4032 mut cx: AsyncAppContext,
4033 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4034 where
4035 <T::LspRequest as lsp::request::Request>::Result: Send,
4036 {
4037 let sender_id = envelope.original_sender_id()?;
4038 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4039 let buffer_handle = this.read_with(&cx, |this, _| {
4040 this.opened_buffers
4041 .get(&buffer_id)
4042 .and_then(|buffer| buffer.upgrade(&cx))
4043 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4044 })?;
4045 let request = T::from_proto(
4046 envelope.payload,
4047 this.clone(),
4048 buffer_handle.clone(),
4049 cx.clone(),
4050 )
4051 .await?;
4052 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4053 let response = this
4054 .update(&mut cx, |this, cx| {
4055 this.request_lsp(buffer_handle, request, cx)
4056 })
4057 .await?;
4058 this.update(&mut cx, |this, cx| {
4059 Ok(T::response_to_proto(
4060 response,
4061 this,
4062 sender_id,
4063 &buffer_version,
4064 cx,
4065 ))
4066 })
4067 }
4068
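    /// Handles a `GetProjectSymbols` request by running a workspace symbol
    /// query and returning the serialized results.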
4069 async fn handle_get_project_symbols(
4070 this: ModelHandle<Self>,
4071 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4072 _: Arc<Client>,
4073 mut cx: AsyncAppContext,
4074 ) -> Result<proto::GetProjectSymbolsResponse> {
4075 let symbols = this
4076 .update(&mut cx, |this, cx| {
4077 this.symbols(&envelope.payload.query, cx)
4078 })
4079 .await?;
4080
4081 Ok(proto::GetProjectSymbolsResponse {
4082 symbols: symbols.iter().map(serialize_symbol).collect(),
4083 })
4084 }
4085
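    /// Handles a `SearchProject` request by running the search locally and
    /// returning the matching ranges as locations, serializing each matched
    /// buffer for the requesting peer.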
4086 async fn handle_search_project(
4087 this: ModelHandle<Self>,
4088 envelope: TypedEnvelope<proto::SearchProject>,
4089 _: Arc<Client>,
4090 mut cx: AsyncAppContext,
4091 ) -> Result<proto::SearchProjectResponse> {
4092 let peer_id = envelope.original_sender_id()?;
4093 let query = SearchQuery::from_proto(envelope.payload)?;
4094 let result = this
4095 .update(&mut cx, |this, cx| this.search(query, cx))
4096 .await?;
4097
4098 this.update(&mut cx, |this, cx| {
4099 let mut locations = Vec::new();
4100 for (buffer, ranges) in result {
4101 for range in ranges {
4102 let start = serialize_anchor(&range.start);
4103 let end = serialize_anchor(&range.end);
4104 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4105 locations.push(proto::Location {
4106 buffer: Some(buffer),
4107 start: Some(start),
4108 end: Some(end),
4109 });
4110 }
4111 }
4112 Ok(proto::SearchProjectResponse { locations })
4113 })
4114 }
4115
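    /// Handles a request to open the buffer containing a project symbol. The
    /// symbol's signature is verified against this project's nonce before the
    /// buffer is opened and serialized for the requesting peer.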
4116 async fn handle_open_buffer_for_symbol(
4117 this: ModelHandle<Self>,
4118 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4119 _: Arc<Client>,
4120 mut cx: AsyncAppContext,
4121 ) -> Result<proto::OpenBufferForSymbolResponse> {
4122 let peer_id = envelope.original_sender_id()?;
4123 let symbol = envelope
4124 .payload
4125 .symbol
4126 .ok_or_else(|| anyhow!("invalid symbol"))?;
4127 let symbol = this.read_with(&cx, |this, _| {
4128 let symbol = this.deserialize_symbol(symbol)?;
4129 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4130 if signature == symbol.signature {
4131 Ok(symbol)
4132 } else {
4133 Err(anyhow!("invalid symbol signature"))
4134 }
4135 })?;
4136 let buffer = this
4137 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4138 .await?;
4139
4140 Ok(proto::OpenBufferForSymbolResponse {
4141 buffer: Some(this.update(&mut cx, |this, cx| {
4142 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4143 })),
4144 })
4145 }
4146
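    /// Computes a signature for a symbol's path by hashing the worktree id,
    /// the path, and this project's private nonce. Guests must send back a
    /// matching signature when opening a symbol, so only symbols previously
    /// produced by this project are accepted.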
4147 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4148 let mut hasher = Sha256::new();
4149 hasher.update(worktree_id.to_proto().to_be_bytes());
4150 hasher.update(path.to_string_lossy().as_bytes());
4151 hasher.update(self.nonce.to_be_bytes());
4152 hasher.finalize().as_slice().try_into().unwrap()
4153 }
4154
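    /// Handles a request to open a buffer by its remote id, serializing the
    /// buffer for the requesting peer.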
4155 async fn handle_open_buffer_by_id(
4156 this: ModelHandle<Self>,
4157 envelope: TypedEnvelope<proto::OpenBufferById>,
4158 _: Arc<Client>,
4159 mut cx: AsyncAppContext,
4160 ) -> Result<proto::OpenBufferResponse> {
4161 let peer_id = envelope.original_sender_id()?;
4162 let buffer = this
4163 .update(&mut cx, |this, cx| {
4164 this.open_buffer_by_id(envelope.payload.id, cx)
4165 })
4166 .await?;
4167 this.update(&mut cx, |this, cx| {
4168 Ok(proto::OpenBufferResponse {
4169 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4170 })
4171 })
4172 }
4173
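    /// Handles a request to open a buffer by worktree id and path, serializing
    /// the buffer for the requesting peer.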
4174 async fn handle_open_buffer_by_path(
4175 this: ModelHandle<Self>,
4176 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4177 _: Arc<Client>,
4178 mut cx: AsyncAppContext,
4179 ) -> Result<proto::OpenBufferResponse> {
4180 let peer_id = envelope.original_sender_id()?;
4181 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4182 let open_buffer = this.update(&mut cx, |this, cx| {
4183 this.open_buffer(
4184 ProjectPath {
4185 worktree_id,
4186 path: PathBuf::from(envelope.payload.path).into(),
4187 },
4188 cx,
4189 )
4190 });
4191
4192 let buffer = open_buffer.await?;
4193 this.update(&mut cx, |this, cx| {
4194 Ok(proto::OpenBufferResponse {
4195 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4196 })
4197 })
4198 }
4199
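    /// Converts a `ProjectTransaction` into its protobuf form, serializing
    /// each affected buffer for the given peer so that the peer can replay
    /// the transaction locally.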
4200 fn serialize_project_transaction_for_peer(
4201 &mut self,
4202 project_transaction: ProjectTransaction,
4203 peer_id: PeerId,
4204 cx: &AppContext,
4205 ) -> proto::ProjectTransaction {
4206 let mut serialized_transaction = proto::ProjectTransaction {
4207 buffers: Default::default(),
4208 transactions: Default::default(),
4209 };
4210 for (buffer, transaction) in project_transaction.0 {
4211 serialized_transaction
4212 .buffers
4213 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4214 serialized_transaction
4215 .transactions
4216 .push(language::proto::serialize_transaction(&transaction));
4217 }
4218 serialized_transaction
4219 }
4220
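    /// Reconstructs a `ProjectTransaction` from its protobuf form: resolves
    /// each referenced buffer, waits for the transaction's edits to arrive,
    /// and optionally pushes each transaction onto the buffer's undo history.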
4221 fn deserialize_project_transaction(
4222 &mut self,
4223 message: proto::ProjectTransaction,
4224 push_to_history: bool,
4225 cx: &mut ModelContext<Self>,
4226 ) -> Task<Result<ProjectTransaction>> {
4227 cx.spawn(|this, mut cx| async move {
4228 let mut project_transaction = ProjectTransaction::default();
4229 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4230 let buffer = this
4231 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4232 .await?;
4233 let transaction = language::proto::deserialize_transaction(transaction)?;
4234 project_transaction.0.insert(buffer, transaction);
4235 }
4236
4237 for (buffer, transaction) in &project_transaction.0 {
4238 buffer
4239 .update(&mut cx, |buffer, _| {
4240 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4241 })
4242 .await;
4243
4244 if push_to_history {
4245 buffer.update(&mut cx, |buffer, _| {
4246 buffer.push_transaction(transaction.clone(), Instant::now());
4247 });
4248 }
4249 }
4250
4251 Ok(project_transaction)
4252 })
4253 }
4254
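    /// Serializes a buffer for a peer. The first time a buffer is sent to a
    /// given peer, its full state is included; subsequent references send
    /// only the buffer id.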
4255 fn serialize_buffer_for_peer(
4256 &mut self,
4257 buffer: &ModelHandle<Buffer>,
4258 peer_id: PeerId,
4259 cx: &AppContext,
4260 ) -> proto::Buffer {
4261 let buffer_id = buffer.read(cx).remote_id();
4262 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4263 if shared_buffers.insert(buffer_id) {
4264 proto::Buffer {
4265 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4266 }
4267 } else {
4268 proto::Buffer {
4269 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4270 }
4271 }
4272 }
4273
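    /// Resolves a protobuf buffer into a local buffer handle. The id variant
    /// waits until the corresponding buffer has been opened, while the state
    /// variant constructs a new buffer (along with its file metadata) and
    /// registers it with the project.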
4274 fn deserialize_buffer(
4275 &mut self,
4276 buffer: proto::Buffer,
4277 cx: &mut ModelContext<Self>,
4278 ) -> Task<Result<ModelHandle<Buffer>>> {
4279 let replica_id = self.replica_id();
4280
4281 let opened_buffer_tx = self.opened_buffer.0.clone();
4282 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4283 cx.spawn(|this, mut cx| async move {
4284 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4285 proto::buffer::Variant::Id(id) => {
4286 let buffer = loop {
4287 let buffer = this.read_with(&cx, |this, cx| {
4288 this.opened_buffers
4289 .get(&id)
4290 .and_then(|buffer| buffer.upgrade(cx))
4291 });
4292 if let Some(buffer) = buffer {
4293 break buffer;
4294 }
4295 opened_buffer_rx
4296 .next()
4297 .await
4298 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4299 };
4300 Ok(buffer)
4301 }
4302 proto::buffer::Variant::State(mut buffer) => {
4303 let mut buffer_worktree = None;
4304 let mut buffer_file = None;
4305 if let Some(file) = buffer.file.take() {
4306 this.read_with(&cx, |this, cx| {
4307 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4308 let worktree =
4309 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4310 anyhow!("no worktree found for id {}", file.worktree_id)
4311 })?;
4312 buffer_file =
4313 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4314 as Box<dyn language::File>);
4315 buffer_worktree = Some(worktree);
4316 Ok::<_, anyhow::Error>(())
4317 })?;
4318 }
4319
4320 let buffer = cx.add_model(|cx| {
4321 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4322 });
4323
4324 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4325
4326 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4327 Ok(buffer)
4328 }
4329 }
4330 })
4331 }
4332
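    /// Deserializes a `proto::Symbol`, selecting a language based on the
    /// symbol's path in order to compute a display label, and falling back to
    /// a plain label when no language matches.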
4333 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4334 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4335 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4336 let start = serialized_symbol
4337 .start
4338 .ok_or_else(|| anyhow!("invalid start"))?;
4339 let end = serialized_symbol
4340 .end
4341 .ok_or_else(|| anyhow!("invalid end"))?;
4342 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4343 let path = PathBuf::from(serialized_symbol.path);
4344 let language = self.languages.select_language(&path);
4345 Ok(Symbol {
4346 source_worktree_id,
4347 worktree_id,
4348 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4349 label: language
4350 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4351 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4352 name: serialized_symbol.name,
4353 path,
4354 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4355 kind,
4356 signature: serialized_symbol
4357 .signature
4358 .try_into()
4359 .map_err(|_| anyhow!("invalid signature"))?,
4360 })
4361 }
4362
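    /// Handles a `BufferSaved` message by recording the saved version and
    /// mtime on the corresponding local buffer, if it is still open.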
4363 async fn handle_buffer_saved(
4364 this: ModelHandle<Self>,
4365 envelope: TypedEnvelope<proto::BufferSaved>,
4366 _: Arc<Client>,
4367 mut cx: AsyncAppContext,
4368 ) -> Result<()> {
4369 let version = deserialize_version(envelope.payload.version);
4370 let mtime = envelope
4371 .payload
4372 .mtime
4373 .ok_or_else(|| anyhow!("missing mtime"))?
4374 .into();
4375
4376 this.update(&mut cx, |this, cx| {
4377 let buffer = this
4378 .opened_buffers
4379 .get(&envelope.payload.buffer_id)
4380 .and_then(|buffer| buffer.upgrade(cx));
4381 if let Some(buffer) = buffer {
4382 buffer.update(cx, |buffer, cx| {
4383 buffer.did_save(version, mtime, None, cx);
4384 });
4385 }
4386 Ok(())
4387 })
4388 }
4389
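    /// Handles a `BufferReloaded` message by recording the reloaded version
    /// and mtime on the corresponding local buffer, if it is still open.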
4390 async fn handle_buffer_reloaded(
4391 this: ModelHandle<Self>,
4392 envelope: TypedEnvelope<proto::BufferReloaded>,
4393 _: Arc<Client>,
4394 mut cx: AsyncAppContext,
4395 ) -> Result<()> {
4396 let payload = envelope.payload.clone();
4397 let version = deserialize_version(payload.version);
4398 let mtime = payload
4399 .mtime
4400 .ok_or_else(|| anyhow!("missing mtime"))?
4401 .into();
4402 this.update(&mut cx, |this, cx| {
4403 let buffer = this
4404 .opened_buffers
4405 .get(&payload.buffer_id)
4406 .and_then(|buffer| buffer.upgrade(cx));
4407 if let Some(buffer) = buffer {
4408 buffer.update(cx, |buffer, cx| {
4409 buffer.did_reload(version, mtime, cx);
4410 });
4411 }
4412 Ok(())
4413 })
4414 }
4415
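    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches. Root names are included in the
    /// candidates when the project contains more than one worktree.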
4416 pub fn match_paths<'a>(
4417 &self,
4418 query: &'a str,
4419 include_ignored: bool,
4420 smart_case: bool,
4421 max_results: usize,
4422 cancel_flag: &'a AtomicBool,
4423 cx: &AppContext,
4424 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4425 let worktrees = self
4426 .worktrees(cx)
4427 .filter(|worktree| worktree.read(cx).is_visible())
4428 .collect::<Vec<_>>();
4429 let include_root_name = worktrees.len() > 1;
4430 let candidate_sets = worktrees
4431 .into_iter()
4432 .map(|worktree| CandidateSet {
4433 snapshot: worktree.read(cx).snapshot(),
4434 include_ignored,
4435 include_root_name,
4436 })
4437 .collect::<Vec<_>>();
4438
4439 let background = cx.background().clone();
4440 async move {
4441 fuzzy::match_paths(
4442 candidate_sets.as_slice(),
4443 query,
4444 smart_case,
4445 max_results,
4446 cancel_flag,
4447 background,
4448 )
4449 .await
4450 }
4451 }
4452
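    /// Converts LSP text edits into anchor-based buffer edits, resolving them
    /// against the buffer snapshot that corresponds to the given LSP document
    /// version. Adjacent edits are merged, and multiline replacements are
    /// diffed so that anchors in unchanged regions are preserved.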
4453 fn edits_from_lsp(
4454 &mut self,
4455 buffer: &ModelHandle<Buffer>,
4456 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4457 version: Option<i32>,
4458 cx: &mut ModelContext<Self>,
4459 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4460 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4461 cx.background().spawn(async move {
4462 let snapshot = snapshot?;
4463 let mut lsp_edits = lsp_edits
4464 .into_iter()
4465 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4466 .peekable();
4467
4468 let mut edits = Vec::new();
4469 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4470 // Combine any LSP edits that are adjacent.
4471 //
4472 // Also, combine LSP edits that are separated from each other by only
4473 // a newline. This is important because for some code actions,
4474 // Rust-analyzer rewrites the entire buffer via a series of edits that
4475 // are separated by unchanged newline characters.
4476 //
4477 // In order for the diffing logic below to work properly, any edits that
4478 // cancel each other out must be combined into one.
4479 while let Some((next_range, next_text)) = lsp_edits.peek() {
4480 if next_range.start > range.end {
4481 if next_range.start.row > range.end.row + 1
4482 || next_range.start.column > 0
4483 || snapshot.clip_point_utf16(
4484 PointUtf16::new(range.end.row, u32::MAX),
4485 Bias::Left,
4486 ) > range.end
4487 {
4488 break;
4489 }
4490 new_text.push('\n');
4491 }
4492 range.end = next_range.end;
4493 new_text.push_str(&next_text);
4494 lsp_edits.next();
4495 }
4496
4497 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4498 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4499 {
4500 return Err(anyhow!("invalid edits received from language server"));
4501 }
4502
4503 // For multiline edits, perform a diff of the old and new text so that
4504 // we can identify the changes more precisely, preserving the locations
4505 // of any anchors positioned in the unchanged regions.
4506 if range.end.row > range.start.row {
4507 let mut offset = range.start.to_offset(&snapshot);
4508 let old_text = snapshot.text_for_range(range).collect::<String>();
4509
4510 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4511 let mut moved_since_edit = true;
4512 for change in diff.iter_all_changes() {
4513 let tag = change.tag();
4514 let value = change.value();
4515 match tag {
4516 ChangeTag::Equal => {
4517 offset += value.len();
4518 moved_since_edit = true;
4519 }
4520 ChangeTag::Delete => {
4521 let start = snapshot.anchor_after(offset);
4522 let end = snapshot.anchor_before(offset + value.len());
4523 if moved_since_edit {
4524 edits.push((start..end, String::new()));
4525 } else {
4526 edits.last_mut().unwrap().0.end = end;
4527 }
4528 offset += value.len();
4529 moved_since_edit = false;
4530 }
4531 ChangeTag::Insert => {
4532 if moved_since_edit {
4533 let anchor = snapshot.anchor_after(offset);
4534 edits.push((anchor.clone()..anchor, value.to_string()));
4535 } else {
4536 edits.last_mut().unwrap().1.push_str(value);
4537 }
4538 moved_since_edit = false;
4539 }
4540 }
4541 }
4542 } else if range.end == range.start {
4543 let anchor = snapshot.anchor_after(range.start);
4544 edits.push((anchor.clone()..anchor, new_text));
4545 } else {
4546 let edit_start = snapshot.anchor_after(range.start);
4547 let edit_end = snapshot.anchor_before(range.end);
4548 edits.push((edit_start..edit_end, new_text));
4549 }
4550 }
4551
4552 Ok(edits)
4553 })
4554 }
4555
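    /// Returns the buffer snapshot that was sent to the language server at
    /// the given document version, pruning snapshots more than
    /// `OLD_VERSIONS_TO_RETAIN` versions older than the requested one. When
    /// no version is given, the buffer's current text snapshot is returned.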
4556 fn buffer_snapshot_for_lsp_version(
4557 &mut self,
4558 buffer: &ModelHandle<Buffer>,
4559 version: Option<i32>,
4560 cx: &AppContext,
4561 ) -> Result<TextBufferSnapshot> {
4562 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4563
4564 if let Some(version) = version {
4565 let buffer_id = buffer.read(cx).remote_id();
4566 let snapshots = self
4567 .buffer_snapshots
4568 .get_mut(&buffer_id)
4569 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4570 let mut found_snapshot = None;
4571 snapshots.retain(|(snapshot_version, snapshot)| {
4572 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4573 false
4574 } else {
4575 if *snapshot_version == version {
4576 found_snapshot = Some(snapshot.clone());
4577 }
4578 true
4579 }
4580 });
4581
4582 found_snapshot.ok_or_else(|| {
4583 anyhow!(
4584 "snapshot not found for buffer {} at version {}",
4585 buffer_id,
4586 version
4587 )
4588 })
4589 } else {
            Ok(buffer.read(cx).text_snapshot())
4591 }
4592 }
4593
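    /// Returns the language server (and its adapter) associated with the
    /// given buffer's worktree and language, if one is running.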
4594 fn language_server_for_buffer(
4595 &self,
4596 buffer: &Buffer,
4597 cx: &AppContext,
4598 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4599 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4600 let worktree_id = file.worktree_id(cx);
4601 self.language_servers
4602 .get(&(worktree_id, language.lsp_adapter()?.name()))
4603 } else {
4604 None
4605 }
4606 }
4607}
4608
4609impl WorktreeHandle {
4610 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4611 match self {
4612 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4613 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4614 }
4615 }
4616}
4617
4618impl OpenBuffer {
4619 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4620 match self {
4621 OpenBuffer::Strong(handle) => Some(handle.clone()),
4622 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4623 OpenBuffer::Loading(_) => None,
4624 }
4625 }
4626}
4627
4628struct CandidateSet {
4629 snapshot: Snapshot,
4630 include_ignored: bool,
4631 include_root_name: bool,
4632}
4633
4634impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4635 type Candidates = CandidateSetIter<'a>;
4636
4637 fn id(&self) -> usize {
4638 self.snapshot.id().to_usize()
4639 }
4640
4641 fn len(&self) -> usize {
4642 if self.include_ignored {
4643 self.snapshot.file_count()
4644 } else {
4645 self.snapshot.visible_file_count()
4646 }
4647 }
4648
4649 fn prefix(&self) -> Arc<str> {
4650 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4651 self.snapshot.root_name().into()
4652 } else if self.include_root_name {
4653 format!("{}/", self.snapshot.root_name()).into()
4654 } else {
4655 "".into()
4656 }
4657 }
4658
4659 fn candidates(&'a self, start: usize) -> Self::Candidates {
4660 CandidateSetIter {
4661 traversal: self.snapshot.files(self.include_ignored, start),
4662 }
4663 }
4664}
4665
4666struct CandidateSetIter<'a> {
4667 traversal: Traversal<'a>,
4668}
4669
4670impl<'a> Iterator for CandidateSetIter<'a> {
4671 type Item = PathMatchCandidate<'a>;
4672
4673 fn next(&mut self) -> Option<Self::Item> {
4674 self.traversal.next().map(|entry| {
4675 if let EntryKind::File(char_bag) = entry.kind {
4676 PathMatchCandidate {
4677 path: &entry.path,
4678 char_bag,
4679 }
4680 } else {
4681 unreachable!()
4682 }
4683 })
4684 }
4685}
4686
4687impl Entity for Project {
4688 type Event = Event;
4689
4690 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4691 match &self.client_state {
4692 ProjectClientState::Local { remote_id_rx, .. } => {
4693 if let Some(project_id) = *remote_id_rx.borrow() {
4694 self.client
4695 .send(proto::UnregisterProject { project_id })
4696 .log_err();
4697 }
4698 }
4699 ProjectClientState::Remote { remote_id, .. } => {
4700 self.client
4701 .send(proto::LeaveProject {
4702 project_id: *remote_id,
4703 })
4704 .log_err();
4705 }
4706 }
4707 }
4708
4709 fn app_will_quit(
4710 &mut self,
4711 _: &mut MutableAppContext,
4712 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4713 let shutdown_futures = self
4714 .language_servers
4715 .drain()
4716 .filter_map(|(_, (_, server))| server.shutdown())
4717 .collect::<Vec<_>>();
4718 Some(
4719 async move {
4720 futures::future::join_all(shutdown_futures).await;
4721 }
4722 .boxed(),
4723 )
4724 }
4725}
4726
4727impl Collaborator {
4728 fn from_proto(
4729 message: proto::Collaborator,
4730 user_store: &ModelHandle<UserStore>,
4731 cx: &mut AsyncAppContext,
4732 ) -> impl Future<Output = Result<Self>> {
4733 let user = user_store.update(cx, |user_store, cx| {
4734 user_store.fetch_user(message.user_id, cx)
4735 });
4736
4737 async move {
4738 Ok(Self {
4739 peer_id: PeerId(message.peer_id),
4740 user: user.await?,
4741 replica_id: message.replica_id as ReplicaId,
4742 })
4743 }
4744 }
4745}
4746
4747impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4748 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4749 Self {
4750 worktree_id,
4751 path: path.as_ref().into(),
4752 }
4753 }
4754}
4755
4756impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4757 fn from(options: lsp::CreateFileOptions) -> Self {
4758 Self {
4759 overwrite: options.overwrite.unwrap_or(false),
4760 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4761 }
4762 }
4763}
4764
4765impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4766 fn from(options: lsp::RenameFileOptions) -> Self {
4767 Self {
4768 overwrite: options.overwrite.unwrap_or(false),
4769 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4770 }
4771 }
4772}
4773
4774impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4775 fn from(options: lsp::DeleteFileOptions) -> Self {
4776 Self {
4777 recursive: options.recursive.unwrap_or(false),
4778 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4779 }
4780 }
4781}
4782
4783fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4784 proto::Symbol {
4785 source_worktree_id: symbol.source_worktree_id.to_proto(),
4786 worktree_id: symbol.worktree_id.to_proto(),
4787 language_server_name: symbol.language_server_name.0.to_string(),
4788 name: symbol.name.clone(),
4789 kind: unsafe { mem::transmute(symbol.kind) },
4790 path: symbol.path.to_string_lossy().to_string(),
4791 start: Some(proto::Point {
4792 row: symbol.range.start.row,
4793 column: symbol.range.start.column,
4794 }),
4795 end: Some(proto::Point {
4796 row: symbol.range.end.row,
4797 column: symbol.range.end.column,
4798 }),
4799 signature: symbol.signature.to_vec(),
4800 }
4801}
4802
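/// Computes the path of `path` relative to `base`, inserting `..` components
/// where the two paths diverge.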
4803fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4804 let mut path_components = path.components();
4805 let mut base_components = base.components();
4806 let mut components: Vec<Component> = Vec::new();
4807 loop {
4808 match (path_components.next(), base_components.next()) {
4809 (None, None) => break,
4810 (Some(a), None) => {
4811 components.push(a);
4812 components.extend(path_components.by_ref());
4813 break;
4814 }
4815 (None, _) => components.push(Component::ParentDir),
4816 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4817 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4818 (Some(a), Some(_)) => {
4819 components.push(Component::ParentDir);
4820 for _ in base_components {
4821 components.push(Component::ParentDir);
4822 }
4823 components.push(a);
4824 components.extend(path_components.by_ref());
4825 break;
4826 }
4827 }
4828 }
4829 components.iter().map(|c| c.as_os_str()).collect()
4830}
4831
4832impl Item for Buffer {
4833 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4834 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4835 }
4836}
4837
4838#[cfg(test)]
4839mod tests {
4840 use super::{Event, *};
4841 use fs::RealFs;
4842 use futures::{future, StreamExt};
4843 use gpui::test::subscribe;
4844 use language::{
4845 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4846 ToPoint,
4847 };
4848 use lsp::Url;
4849 use serde_json::json;
4850 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4851 use unindent::Unindent as _;
4852 use util::{assert_set_eq, test::temp_tree};
4853 use worktree::WorktreeHandle as _;
4854
4855 #[gpui::test]
4856 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4857 let dir = temp_tree(json!({
4858 "root": {
4859 "apple": "",
4860 "banana": {
4861 "carrot": {
4862 "date": "",
4863 "endive": "",
4864 }
4865 },
4866 "fennel": {
4867 "grape": "",
4868 }
4869 }
4870 }));
4871
4872 let root_link_path = dir.path().join("root_link");
4873 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4874 unix::fs::symlink(
4875 &dir.path().join("root/fennel"),
4876 &dir.path().join("root/finnochio"),
4877 )
4878 .unwrap();
4879
4880 let project = Project::test(Arc::new(RealFs), cx);
4881
4882 let (tree, _) = project
4883 .update(cx, |project, cx| {
4884 project.find_or_create_local_worktree(&root_link_path, true, cx)
4885 })
4886 .await
4887 .unwrap();
4888
4889 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4890 .await;
4891 cx.read(|cx| {
4892 let tree = tree.read(cx);
4893 assert_eq!(tree.file_count(), 5);
4894 assert_eq!(
4895 tree.inode_for_path("fennel/grape"),
4896 tree.inode_for_path("finnochio/grape")
4897 );
4898 });
4899
4900 let cancel_flag = Default::default();
4901 let results = project
4902 .read_with(cx, |project, cx| {
4903 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4904 })
4905 .await;
4906 assert_eq!(
4907 results
4908 .into_iter()
4909 .map(|result| result.path)
4910 .collect::<Vec<Arc<Path>>>(),
4911 vec![
4912 PathBuf::from("banana/carrot/date").into(),
4913 PathBuf::from("banana/carrot/endive").into(),
4914 ]
4915 );
4916 }
4917
4918 #[gpui::test]
4919 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4920 cx.foreground().forbid_parking();
4921
4922 let mut rust_language = Language::new(
4923 LanguageConfig {
4924 name: "Rust".into(),
4925 path_suffixes: vec!["rs".to_string()],
4926 ..Default::default()
4927 },
4928 Some(tree_sitter_rust::language()),
4929 );
4930 let mut json_language = Language::new(
4931 LanguageConfig {
4932 name: "JSON".into(),
4933 path_suffixes: vec!["json".to_string()],
4934 ..Default::default()
4935 },
4936 None,
4937 );
4938 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4939 name: "the-rust-language-server",
4940 capabilities: lsp::ServerCapabilities {
4941 completion_provider: Some(lsp::CompletionOptions {
4942 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4943 ..Default::default()
4944 }),
4945 ..Default::default()
4946 },
4947 ..Default::default()
4948 });
4949 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4950 name: "the-json-language-server",
4951 capabilities: lsp::ServerCapabilities {
4952 completion_provider: Some(lsp::CompletionOptions {
4953 trigger_characters: Some(vec![":".to_string()]),
4954 ..Default::default()
4955 }),
4956 ..Default::default()
4957 },
4958 ..Default::default()
4959 });
4960
4961 let fs = FakeFs::new(cx.background());
4962 fs.insert_tree(
4963 "/the-root",
4964 json!({
4965 "test.rs": "const A: i32 = 1;",
4966 "test2.rs": "",
4967 "Cargo.toml": "a = 1",
4968 "package.json": "{\"a\": 1}",
4969 }),
4970 )
4971 .await;
4972
4973 let project = Project::test(fs, cx);
4974 project.update(cx, |project, _| {
4975 project.languages.add(Arc::new(rust_language));
4976 project.languages.add(Arc::new(json_language));
4977 });
4978
4979 let worktree_id = project
4980 .update(cx, |project, cx| {
4981 project.find_or_create_local_worktree("/the-root", true, cx)
4982 })
4983 .await
4984 .unwrap()
4985 .0
4986 .read_with(cx, |tree, _| tree.id());
4987
4988 // Open a buffer without an associated language server.
4989 let toml_buffer = project
4990 .update(cx, |project, cx| {
4991 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4992 })
4993 .await
4994 .unwrap();
4995
4996 // Open a buffer with an associated language server.
4997 let rust_buffer = project
4998 .update(cx, |project, cx| {
4999 project.open_buffer((worktree_id, "test.rs"), cx)
5000 })
5001 .await
5002 .unwrap();
5003
5004 // A server is started up, and it is notified about Rust files.
5005 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5006 assert_eq!(
5007 fake_rust_server
5008 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5009 .await
5010 .text_document,
5011 lsp::TextDocumentItem {
5012 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5013 version: 0,
5014 text: "const A: i32 = 1;".to_string(),
5015 language_id: Default::default()
5016 }
5017 );
5018
5019 // The buffer is configured based on the language server's capabilities.
5020 rust_buffer.read_with(cx, |buffer, _| {
5021 assert_eq!(
5022 buffer.completion_triggers(),
5023 &[".".to_string(), "::".to_string()]
5024 );
5025 });
5026 toml_buffer.read_with(cx, |buffer, _| {
5027 assert!(buffer.completion_triggers().is_empty());
5028 });
5029
5030 // Edit a buffer. The changes are reported to the language server.
5031 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5032 assert_eq!(
5033 fake_rust_server
5034 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5035 .await
5036 .text_document,
5037 lsp::VersionedTextDocumentIdentifier::new(
5038 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5039 1
5040 )
5041 );
5042
5043 // Open a third buffer with a different associated language server.
5044 let json_buffer = project
5045 .update(cx, |project, cx| {
5046 project.open_buffer((worktree_id, "package.json"), cx)
5047 })
5048 .await
5049 .unwrap();
5050
        // A JSON language server is started up, and it is notified only about the JSON buffer.
5052 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5053 assert_eq!(
5054 fake_json_server
5055 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5056 .await
5057 .text_document,
5058 lsp::TextDocumentItem {
5059 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5060 version: 0,
5061 text: "{\"a\": 1}".to_string(),
5062 language_id: Default::default()
5063 }
5064 );
5065
5066 // This buffer is configured based on the second language server's
5067 // capabilities.
5068 json_buffer.read_with(cx, |buffer, _| {
5069 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5070 });
5071
5072 // When opening another buffer whose language server is already running,
5073 // it is also configured based on the existing language server's capabilities.
5074 let rust_buffer2 = project
5075 .update(cx, |project, cx| {
5076 project.open_buffer((worktree_id, "test2.rs"), cx)
5077 })
5078 .await
5079 .unwrap();
5080 rust_buffer2.read_with(cx, |buffer, _| {
5081 assert_eq!(
5082 buffer.completion_triggers(),
5083 &[".".to_string(), "::".to_string()]
5084 );
5085 });
5086
5087 // Changes are reported only to servers matching the buffer's language.
5088 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5089 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5090 assert_eq!(
5091 fake_rust_server
5092 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5093 .await
5094 .text_document,
5095 lsp::VersionedTextDocumentIdentifier::new(
5096 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5097 1
5098 )
5099 );
5100
5101 // Save notifications are reported to all servers.
5102 toml_buffer
5103 .update(cx, |buffer, cx| buffer.save(cx))
5104 .await
5105 .unwrap();
5106 assert_eq!(
5107 fake_rust_server
5108 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5109 .await
5110 .text_document,
5111 lsp::TextDocumentIdentifier::new(
5112 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5113 )
5114 );
5115 assert_eq!(
5116 fake_json_server
5117 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5118 .await
5119 .text_document,
5120 lsp::TextDocumentIdentifier::new(
5121 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5122 )
5123 );
5124
5125 // Restart language servers
5126 project.update(cx, |project, cx| {
5127 project.restart_language_servers_for_buffers(
5128 vec![rust_buffer.clone(), json_buffer.clone()],
5129 cx,
5130 );
5131 });
5132
5133 let mut rust_shutdown_requests = fake_rust_server
5134 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5135 let mut json_shutdown_requests = fake_json_server
5136 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5137 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5138
5139 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5140 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5141
        // Ensure both Rust documents are reopened in the new Rust language server, regardless of order
5143 assert_set_eq!(
5144 [
5145 fake_rust_server
5146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5147 .await
5148 .text_document,
5149 fake_rust_server
5150 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5151 .await
5152 .text_document,
5153 ],
5154 [
5155 lsp::TextDocumentItem {
5156 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5157 version: 1,
5158 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5159 language_id: Default::default()
5160 },
5161 lsp::TextDocumentItem {
5162 uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5163 version: 1,
5164 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5165 language_id: Default::default()
5166 },
5167 ]
5168 );
5169
        // Ensure the JSON document is reopened in the new JSON language server
5171 assert_eq!(
5172 fake_json_server
5173 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5174 .await
5175 .text_document,
5176 lsp::TextDocumentItem {
5177 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5178 version: 0,
5179 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5180 language_id: Default::default()
5181 }
5182 );
5183
5184 // Close notifications are reported only to servers matching the buffer's language.
5185 cx.update(|_| drop(json_buffer));
5186 let close_message = lsp::DidCloseTextDocumentParams {
5187 text_document: lsp::TextDocumentIdentifier::new(
5188 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5189 ),
5190 };
5191 assert_eq!(
5192 fake_json_server
5193 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5194 .await,
5195 close_message,
5196 );
5197 }
5198
5199 #[gpui::test]
5200 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5201 cx.foreground().forbid_parking();
5202
5203 let progress_token = "the-progress-token";
5204 let mut language = Language::new(
5205 LanguageConfig {
5206 name: "Rust".into(),
5207 path_suffixes: vec!["rs".to_string()],
5208 ..Default::default()
5209 },
5210 Some(tree_sitter_rust::language()),
5211 );
5212 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5213 disk_based_diagnostics_progress_token: Some(progress_token),
5214 disk_based_diagnostics_sources: &["disk"],
5215 ..Default::default()
5216 });
5217
5218 let fs = FakeFs::new(cx.background());
5219 fs.insert_tree(
5220 "/dir",
5221 json!({
5222 "a.rs": "fn a() { A }",
5223 "b.rs": "const y: i32 = 1",
5224 }),
5225 )
5226 .await;
5227
5228 let project = Project::test(fs, cx);
5229 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5230
5231 let (tree, _) = project
5232 .update(cx, |project, cx| {
5233 project.find_or_create_local_worktree("/dir", true, cx)
5234 })
5235 .await
5236 .unwrap();
5237 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5238
5239 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5240 .await;
5241
        // Cause the worktree to start the fake language server
5243 let _buffer = project
5244 .update(cx, |project, cx| {
5245 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5246 })
5247 .await
5248 .unwrap();
5249
5250 let mut events = subscribe(&project, cx);
5251
5252 let mut fake_server = fake_servers.next().await.unwrap();
5253 fake_server.start_progress(progress_token).await;
5254 assert_eq!(
5255 events.next().await.unwrap(),
5256 Event::DiskBasedDiagnosticsStarted
5257 );
5258
5259 fake_server.start_progress(progress_token).await;
5260 fake_server.end_progress(progress_token).await;
5261 fake_server.start_progress(progress_token).await;
5262
5263 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5264 lsp::PublishDiagnosticsParams {
5265 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5266 version: None,
5267 diagnostics: vec![lsp::Diagnostic {
5268 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5269 severity: Some(lsp::DiagnosticSeverity::ERROR),
5270 message: "undefined variable 'A'".to_string(),
5271 ..Default::default()
5272 }],
5273 },
5274 );
5275 assert_eq!(
5276 events.next().await.unwrap(),
5277 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5278 );
5279
5280 fake_server.end_progress(progress_token).await;
5281 fake_server.end_progress(progress_token).await;
5282 assert_eq!(
5283 events.next().await.unwrap(),
5284 Event::DiskBasedDiagnosticsUpdated
5285 );
5286 assert_eq!(
5287 events.next().await.unwrap(),
5288 Event::DiskBasedDiagnosticsFinished
5289 );
5290
5291 let buffer = project
5292 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5293 .await
5294 .unwrap();
5295
5296 buffer.read_with(cx, |buffer, _| {
5297 let snapshot = buffer.snapshot();
5298 let diagnostics = snapshot
5299 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5300 .collect::<Vec<_>>();
5301 assert_eq!(
5302 diagnostics,
5303 &[DiagnosticEntry {
5304 range: Point::new(0, 9)..Point::new(0, 10),
5305 diagnostic: Diagnostic {
5306 severity: lsp::DiagnosticSeverity::ERROR,
5307 message: "undefined variable 'A'".to_string(),
5308 group_id: 0,
5309 is_primary: true,
5310 ..Default::default()
5311 }
5312 }]
5313 )
5314 });
5315 }
5316
5317 #[gpui::test]
5318 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5319 cx.foreground().forbid_parking();
5320
5321 let mut language = Language::new(
5322 LanguageConfig {
5323 name: "Rust".into(),
5324 path_suffixes: vec!["rs".to_string()],
5325 ..Default::default()
5326 },
5327 Some(tree_sitter_rust::language()),
5328 );
5329 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5330 disk_based_diagnostics_sources: &["disk"],
5331 ..Default::default()
5332 });
5333
5334 let text = "
5335 fn a() { A }
5336 fn b() { BB }
5337 fn c() { CCC }
5338 "
5339 .unindent();
5340
5341 let fs = FakeFs::new(cx.background());
5342 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5343
5344 let project = Project::test(fs, cx);
5345 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5346
5347 let worktree_id = project
5348 .update(cx, |project, cx| {
5349 project.find_or_create_local_worktree("/dir", true, cx)
5350 })
5351 .await
5352 .unwrap()
5353 .0
5354 .read_with(cx, |tree, _| tree.id());
5355
5356 let buffer = project
5357 .update(cx, |project, cx| {
5358 project.open_buffer((worktree_id, "a.rs"), cx)
5359 })
5360 .await
5361 .unwrap();
5362
5363 let mut fake_server = fake_servers.next().await.unwrap();
5364 let open_notification = fake_server
5365 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5366 .await;
5367
5368 // Edit the buffer, moving the content down
5369 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5370 let change_notification_1 = fake_server
5371 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5372 .await;
5373 assert!(
5374 change_notification_1.text_document.version > open_notification.text_document.version
5375 );
5376
5377 // Report some diagnostics for the initial version of the buffer
5378 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5379 lsp::PublishDiagnosticsParams {
5380 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5381 version: Some(open_notification.text_document.version),
5382 diagnostics: vec![
5383 lsp::Diagnostic {
5384 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5385 severity: Some(DiagnosticSeverity::ERROR),
5386 message: "undefined variable 'A'".to_string(),
5387 source: Some("disk".to_string()),
5388 ..Default::default()
5389 },
5390 lsp::Diagnostic {
5391 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5392 severity: Some(DiagnosticSeverity::ERROR),
5393 message: "undefined variable 'BB'".to_string(),
5394 source: Some("disk".to_string()),
5395 ..Default::default()
5396 },
5397 lsp::Diagnostic {
5398 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5399 severity: Some(DiagnosticSeverity::ERROR),
5400 source: Some("disk".to_string()),
5401 message: "undefined variable 'CCC'".to_string(),
5402 ..Default::default()
5403 },
5404 ],
5405 },
5406 );
5407
5408 // The diagnostics have moved down since they were created.
5409 buffer.next_notification(cx).await;
5410 buffer.read_with(cx, |buffer, _| {
5411 assert_eq!(
5412 buffer
5413 .snapshot()
5414 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5415 .collect::<Vec<_>>(),
5416 &[
5417 DiagnosticEntry {
5418 range: Point::new(3, 9)..Point::new(3, 11),
5419 diagnostic: Diagnostic {
5420 severity: DiagnosticSeverity::ERROR,
5421 message: "undefined variable 'BB'".to_string(),
5422 is_disk_based: true,
5423 group_id: 1,
5424 is_primary: true,
5425 ..Default::default()
5426 },
5427 },
5428 DiagnosticEntry {
5429 range: Point::new(4, 9)..Point::new(4, 12),
5430 diagnostic: Diagnostic {
5431 severity: DiagnosticSeverity::ERROR,
5432 message: "undefined variable 'CCC'".to_string(),
5433 is_disk_based: true,
5434 group_id: 2,
5435 is_primary: true,
5436 ..Default::default()
5437 }
5438 }
5439 ]
5440 );
5441 assert_eq!(
5442 chunks_with_diagnostics(buffer, 0..buffer.len()),
5443 [
5444 ("\n\nfn a() { ".to_string(), None),
5445 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5446 (" }\nfn b() { ".to_string(), None),
5447 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5448 (" }\nfn c() { ".to_string(), None),
5449 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5450 (" }\n".to_string(), None),
5451 ]
5452 );
5453 assert_eq!(
5454 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5455 [
5456 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5457 (" }\nfn c() { ".to_string(), None),
5458 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5459 ]
5460 );
5461 });
5462
5463 // Ensure overlapping diagnostics are highlighted correctly.
5464 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5465 lsp::PublishDiagnosticsParams {
5466 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5467 version: Some(open_notification.text_document.version),
5468 diagnostics: vec![
5469 lsp::Diagnostic {
5470 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5471 severity: Some(DiagnosticSeverity::ERROR),
5472 message: "undefined variable 'A'".to_string(),
5473 source: Some("disk".to_string()),
5474 ..Default::default()
5475 },
5476 lsp::Diagnostic {
5477 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5478 severity: Some(DiagnosticSeverity::WARNING),
5479 message: "unreachable statement".to_string(),
5480 source: Some("disk".to_string()),
5481 ..Default::default()
5482 },
5483 ],
5484 },
5485 );
5486
5487 buffer.next_notification(cx).await;
5488 buffer.read_with(cx, |buffer, _| {
5489 assert_eq!(
5490 buffer
5491 .snapshot()
5492 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5493 .collect::<Vec<_>>(),
5494 &[
5495 DiagnosticEntry {
5496 range: Point::new(2, 9)..Point::new(2, 12),
5497 diagnostic: Diagnostic {
5498 severity: DiagnosticSeverity::WARNING,
5499 message: "unreachable statement".to_string(),
5500 is_disk_based: true,
5501 group_id: 1,
5502 is_primary: true,
5503 ..Default::default()
5504 }
5505 },
5506 DiagnosticEntry {
5507 range: Point::new(2, 9)..Point::new(2, 10),
5508 diagnostic: Diagnostic {
5509 severity: DiagnosticSeverity::ERROR,
5510 message: "undefined variable 'A'".to_string(),
5511 is_disk_based: true,
5512 group_id: 0,
5513 is_primary: true,
5514 ..Default::default()
5515 },
5516 }
5517 ]
5518 );
5519 assert_eq!(
5520 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5521 [
5522 ("fn a() { ".to_string(), None),
5523 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5524 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5525 ("\n".to_string(), None),
5526 ]
5527 );
5528 assert_eq!(
5529 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5530 [
5531 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5532 ("\n".to_string(), None),
5533 ]
5534 );
5535 });
5536
5537 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5538 // changes since the last save.
5539 buffer.update(cx, |buffer, cx| {
5540 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5541 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5542 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5543 });
5544 let change_notification_2 = fake_server
5545 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5546 .await;
5547 assert!(
5548 change_notification_2.text_document.version
5549 > change_notification_1.text_document.version
5550 );
5551
5552 // Handle out-of-order diagnostics
5553 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5554 lsp::PublishDiagnosticsParams {
5555 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5556 version: Some(change_notification_2.text_document.version),
5557 diagnostics: vec![
5558 lsp::Diagnostic {
5559 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5560 severity: Some(DiagnosticSeverity::ERROR),
5561 message: "undefined variable 'BB'".to_string(),
5562 source: Some("disk".to_string()),
5563 ..Default::default()
5564 },
5565 lsp::Diagnostic {
5566 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5567 severity: Some(DiagnosticSeverity::WARNING),
5568 message: "undefined variable 'A'".to_string(),
5569 source: Some("disk".to_string()),
5570 ..Default::default()
5571 },
5572 ],
5573 },
5574 );
5575
5576 buffer.next_notification(cx).await;
5577 buffer.read_with(cx, |buffer, _| {
5578 assert_eq!(
5579 buffer
5580 .snapshot()
5581 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5582 .collect::<Vec<_>>(),
5583 &[
5584 DiagnosticEntry {
5585 range: Point::new(2, 21)..Point::new(2, 22),
5586 diagnostic: Diagnostic {
5587 severity: DiagnosticSeverity::WARNING,
5588 message: "undefined variable 'A'".to_string(),
5589 is_disk_based: true,
5590 group_id: 1,
5591 is_primary: true,
5592 ..Default::default()
5593 }
5594 },
5595 DiagnosticEntry {
5596 range: Point::new(3, 9)..Point::new(3, 14),
5597 diagnostic: Diagnostic {
5598 severity: DiagnosticSeverity::ERROR,
5599 message: "undefined variable 'BB'".to_string(),
5600 is_disk_based: true,
5601 group_id: 0,
5602 is_primary: true,
5603 ..Default::default()
5604 },
5605 }
5606 ]
5607 );
5608 });
5609 }
5610
5611 #[gpui::test]
5612 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5613 cx.foreground().forbid_parking();
5614
5615 let text = concat!(
5616 "let one = ;\n", //
5617 "let two = \n",
5618 "let three = 3;\n",
5619 );
5620
5621 let fs = FakeFs::new(cx.background());
5622 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5623
5624 let project = Project::test(fs, cx);
5625 let worktree_id = project
5626 .update(cx, |project, cx| {
5627 project.find_or_create_local_worktree("/dir", true, cx)
5628 })
5629 .await
5630 .unwrap()
5631 .0
5632 .read_with(cx, |tree, _| tree.id());
5633
5634 let buffer = project
5635 .update(cx, |project, cx| {
5636 project.open_buffer((worktree_id, "a.rs"), cx)
5637 })
5638 .await
5639 .unwrap();
5640
5641 project.update(cx, |project, cx| {
5642 project
5643 .update_buffer_diagnostics(
5644 &buffer,
5645 vec![
5646 DiagnosticEntry {
5647 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5648 diagnostic: Diagnostic {
5649 severity: DiagnosticSeverity::ERROR,
5650 message: "syntax error 1".to_string(),
5651 ..Default::default()
5652 },
5653 },
5654 DiagnosticEntry {
5655 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5656 diagnostic: Diagnostic {
5657 severity: DiagnosticSeverity::ERROR,
5658 message: "syntax error 2".to_string(),
5659 ..Default::default()
5660 },
5661 },
5662 ],
5663 None,
5664 cx,
5665 )
5666 .unwrap();
5667 });
5668
5669 // An empty range is extended forward to include the following character.
5670 // At the end of a line, an empty range is extended backward to include
5671 // the preceding character.
5672 buffer.read_with(cx, |buffer, _| {
5673 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5674 assert_eq!(
5675 chunks
5676 .iter()
5677 .map(|(s, d)| (s.as_str(), *d))
5678 .collect::<Vec<_>>(),
5679 &[
5680 ("let one = ", None),
5681 (";", Some(DiagnosticSeverity::ERROR)),
5682 ("\nlet two =", None),
5683 (" ", Some(DiagnosticSeverity::ERROR)),
5684 ("\nlet three = 3;\n", None)
5685 ]
5686 );
5687 });
5688 }
5689
5690 #[gpui::test]
5691 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5692 cx.foreground().forbid_parking();
5693
5694 let mut language = Language::new(
5695 LanguageConfig {
5696 name: "Rust".into(),
5697 path_suffixes: vec!["rs".to_string()],
5698 ..Default::default()
5699 },
5700 Some(tree_sitter_rust::language()),
5701 );
5702 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5703
5704 let text = "
5705 fn a() {
5706 f1();
5707 }
5708 fn b() {
5709 f2();
5710 }
5711 fn c() {
5712 f3();
5713 }
5714 "
5715 .unindent();
5716
5717 let fs = FakeFs::new(cx.background());
5718 fs.insert_tree(
5719 "/dir",
5720 json!({
5721 "a.rs": text.clone(),
5722 }),
5723 )
5724 .await;
5725
5726 let project = Project::test(fs, cx);
5727 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5728
5729 let worktree_id = project
5730 .update(cx, |project, cx| {
5731 project.find_or_create_local_worktree("/dir", true, cx)
5732 })
5733 .await
5734 .unwrap()
5735 .0
5736 .read_with(cx, |tree, _| tree.id());
5737
5738 let buffer = project
5739 .update(cx, |project, cx| {
5740 project.open_buffer((worktree_id, "a.rs"), cx)
5741 })
5742 .await
5743 .unwrap();
5744
5745 let mut fake_server = fake_servers.next().await.unwrap();
5746 let lsp_document_version = fake_server
5747 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5748 .await
5749 .text_document
5750 .version;
5751
5752 // Simulate editing the buffer after the language server computes some edits.
5753 buffer.update(cx, |buffer, cx| {
5754 buffer.edit(
5755 [Point::new(0, 0)..Point::new(0, 0)],
5756 "// above first function\n",
5757 cx,
5758 );
5759 buffer.edit(
5760 [Point::new(2, 0)..Point::new(2, 0)],
5761 " // inside first function\n",
5762 cx,
5763 );
5764 buffer.edit(
5765 [Point::new(6, 4)..Point::new(6, 4)],
5766 "// inside second function ",
5767 cx,
5768 );
5769
5770 assert_eq!(
5771 buffer.text(),
5772 "
5773 // above first function
5774 fn a() {
5775 // inside first function
5776 f1();
5777 }
5778 fn b() {
5779 // inside second function f2();
5780 }
5781 fn c() {
5782 f3();
5783 }
5784 "
5785 .unindent()
5786 );
5787 });
5788
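        // Request the edits against the older document version the server saw, so that they
        // can be mapped onto the buffer's current contents.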
5789 let edits = project
5790 .update(cx, |project, cx| {
5791 project.edits_from_lsp(
5792 &buffer,
5793 vec![
5794 // replace body of first function
5795 lsp::TextEdit {
5796 range: lsp::Range::new(
5797 lsp::Position::new(0, 0),
5798 lsp::Position::new(3, 0),
5799 ),
5800 new_text: "
5801 fn a() {
5802 f10();
5803 }
5804 "
5805 .unindent(),
5806 },
5807 // edit inside second function
5808 lsp::TextEdit {
5809 range: lsp::Range::new(
5810 lsp::Position::new(4, 6),
5811 lsp::Position::new(4, 6),
5812 ),
5813 new_text: "00".into(),
5814 },
5815 // edit inside third function via two distinct edits
5816 lsp::TextEdit {
5817 range: lsp::Range::new(
5818 lsp::Position::new(7, 5),
5819 lsp::Position::new(7, 5),
5820 ),
5821 new_text: "4000".into(),
5822 },
5823 lsp::TextEdit {
5824 range: lsp::Range::new(
5825 lsp::Position::new(7, 5),
5826 lsp::Position::new(7, 6),
5827 ),
5828 new_text: "".into(),
5829 },
5830 ],
5831 Some(lsp_document_version),
5832 cx,
5833 )
5834 })
5835 .await
5836 .unwrap();
5837
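        // Applying the returned edits yields a buffer containing both the server's changes
        // and the edits made above.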
5838 buffer.update(cx, |buffer, cx| {
5839 for (range, new_text) in edits {
5840 buffer.edit([range], new_text, cx);
5841 }
5842 assert_eq!(
5843 buffer.text(),
5844 "
5845 // above first function
5846 fn a() {
5847 // inside first function
5848 f10();
5849 }
5850 fn b() {
5851 // inside second function f200();
5852 }
5853 fn c() {
5854 f4000();
5855 }
5856 "
5857 .unindent()
5858 );
5859 });
5860 }
5861
5862 #[gpui::test]
5863 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5864 cx.foreground().forbid_parking();
5865
5866 let text = "
5867 use a::b;
5868 use a::c;
5869
5870 fn f() {
5871 b();
5872 c();
5873 }
5874 "
5875 .unindent();
5876
5877 let fs = FakeFs::new(cx.background());
5878 fs.insert_tree(
5879 "/dir",
5880 json!({
5881 "a.rs": text.clone(),
5882 }),
5883 )
5884 .await;
5885
5886 let project = Project::test(fs, cx);
5887 let worktree_id = project
5888 .update(cx, |project, cx| {
5889 project.find_or_create_local_worktree("/dir", true, cx)
5890 })
5891 .await
5892 .unwrap()
5893 .0
5894 .read_with(cx, |tree, _| tree.id());
5895
5896 let buffer = project
5897 .update(cx, |project, cx| {
5898 project.open_buffer((worktree_id, "a.rs"), cx)
5899 })
5900 .await
5901 .unwrap();
5902
5903 // Simulate the language server sending us a small edit in the form of a very large diff.
5904 // Rust-analyzer does this when performing a merge-imports code action.
5905 let edits = project
5906 .update(cx, |project, cx| {
5907 project.edits_from_lsp(
5908 &buffer,
5909 [
5910 // Replace the first use statement without editing the semicolon.
5911 lsp::TextEdit {
5912 range: lsp::Range::new(
5913 lsp::Position::new(0, 4),
5914 lsp::Position::new(0, 8),
5915 ),
5916 new_text: "a::{b, c}".into(),
5917 },
5918 // Reinsert the remainder of the file between the semicolon and the final
5919 // newline of the file.
5920 lsp::TextEdit {
5921 range: lsp::Range::new(
5922 lsp::Position::new(0, 9),
5923 lsp::Position::new(0, 9),
5924 ),
5925 new_text: "\n\n".into(),
5926 },
5927 lsp::TextEdit {
5928 range: lsp::Range::new(
5929 lsp::Position::new(0, 9),
5930 lsp::Position::new(0, 9),
5931 ),
5932 new_text: "
5933 fn f() {
5934 b();
5935 c();
5936 }"
5937 .unindent(),
5938 },
5939 // Delete everything after the first newline of the file.
5940 lsp::TextEdit {
5941 range: lsp::Range::new(
5942 lsp::Position::new(1, 0),
5943 lsp::Position::new(7, 0),
5944 ),
5945 new_text: "".into(),
5946 },
5947 ],
5948 None,
5949 cx,
5950 )
5951 })
5952 .await
5953 .unwrap();
5954
5955 buffer.update(cx, |buffer, cx| {
5956 let edits = edits
5957 .into_iter()
5958 .map(|(range, text)| {
5959 (
5960 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5961 text,
5962 )
5963 })
5964 .collect::<Vec<_>>();
5965
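            // The verbose set of LSP edits above collapses into two minimal edits: rewriting
            // the first `use` statement and deleting the second one.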
5966 assert_eq!(
5967 edits,
5968 [
5969 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5970 (Point::new(1, 0)..Point::new(2, 0), "".into())
5971 ]
5972 );
5973
5974 for (range, new_text) in edits {
5975 buffer.edit([range], new_text, cx);
5976 }
5977 assert_eq!(
5978 buffer.text(),
5979 "
5980 use a::{b, c};
5981
5982 fn f() {
5983 b();
5984 c();
5985 }
5986 "
5987 .unindent()
5988 );
5989 });
5990 }
5991
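    // Returns the buffer's text in `range` as chunks, merging adjacent chunks that share the
    // same diagnostic severity.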
5992 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5993 buffer: &Buffer,
5994 range: Range<T>,
5995 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5996 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5997 for chunk in buffer.snapshot().chunks(range, true) {
5998 if chunks.last().map_or(false, |prev_chunk| {
5999 prev_chunk.1 == chunk.diagnostic_severity
6000 }) {
6001 chunks.last_mut().unwrap().0.push_str(chunk.text);
6002 } else {
6003 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6004 }
6005 }
6006 chunks
6007 }
6008
6009 #[gpui::test]
6010 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6011 let dir = temp_tree(json!({
6012 "root": {
6013 "dir1": {},
6014 "dir2": {
6015 "dir3": {}
6016 }
6017 }
6018 }));
6019
6020 let project = Project::test(Arc::new(RealFs), cx);
6021 let (tree, _) = project
6022 .update(cx, |project, cx| {
6023 project.find_or_create_local_worktree(&dir.path(), true, cx)
6024 })
6025 .await
6026 .unwrap();
6027
6028 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6029 .await;
6030
6031 let cancel_flag = Default::default();
6032 let results = project
6033 .read_with(cx, |project, cx| {
6034 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6035 })
6036 .await;
6037
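        // No paths match, because the worktree contains only directories.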
6038 assert!(results.is_empty());
6039 }
6040
6041 #[gpui::test]
6042 async fn test_definition(cx: &mut gpui::TestAppContext) {
6043 let mut language = Language::new(
6044 LanguageConfig {
6045 name: "Rust".into(),
6046 path_suffixes: vec!["rs".to_string()],
6047 ..Default::default()
6048 },
6049 Some(tree_sitter_rust::language()),
6050 );
6051 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6052
6053 let fs = FakeFs::new(cx.background());
6054 fs.insert_tree(
6055 "/dir",
6056 json!({
6057 "a.rs": "const fn a() { A }",
6058 "b.rs": "const y: i32 = crate::a()",
6059 }),
6060 )
6061 .await;
6062
6063 let project = Project::test(fs, cx);
6064 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6065
6066 let (tree, _) = project
6067 .update(cx, |project, cx| {
6068 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6069 })
6070 .await
6071 .unwrap();
6072 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6073 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6074 .await;
6075
6076 let buffer = project
6077 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6078 .await
6079 .unwrap();
6080
6081 let fake_server = fake_servers.next().await.unwrap();
6082 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6083 let params = params.text_document_position_params;
6084 assert_eq!(
6085 params.text_document.uri.to_file_path().unwrap(),
6086 Path::new("/dir/b.rs"),
6087 );
6088 assert_eq!(params.position, lsp::Position::new(0, 22));
6089
6090 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6091 lsp::Location::new(
6092 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6093 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6094 ),
6095 )))
6096 });
6097
6098 let mut definitions = project
6099 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6100 .await
6101 .unwrap();
6102
6103 assert_eq!(definitions.len(), 1);
6104 let definition = definitions.pop().unwrap();
6105 cx.update(|cx| {
6106 let target_buffer = definition.buffer.read(cx);
6107 assert_eq!(
6108 target_buffer
6109 .file()
6110 .unwrap()
6111 .as_local()
6112 .unwrap()
6113 .abs_path(cx),
6114 Path::new("/dir/a.rs"),
6115 );
6116 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6117 assert_eq!(
6118 list_worktrees(&project, cx),
6119 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6120 );
6121
6122 drop(definition);
6123 });
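        // Now that the definition has been dropped, the worktree that was opened to hold the
        // target file is released.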
6124 cx.read(|cx| {
6125 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6126 });
6127
6128 fn list_worktrees<'a>(
6129 project: &'a ModelHandle<Project>,
6130 cx: &'a AppContext,
6131 ) -> Vec<(&'a Path, bool)> {
6132 project
6133 .read(cx)
6134 .worktrees(cx)
6135 .map(|worktree| {
6136 let worktree = worktree.read(cx);
6137 (
6138 worktree.as_local().unwrap().abs_path().as_ref(),
6139 worktree.is_visible(),
6140 )
6141 })
6142 .collect::<Vec<_>>()
6143 }
6144 }
6145
6146 #[gpui::test(iterations = 10)]
6147 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6148 let mut language = Language::new(
6149 LanguageConfig {
6150 name: "TypeScript".into(),
6151 path_suffixes: vec!["ts".to_string()],
6152 ..Default::default()
6153 },
6154 None,
6155 );
6156 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6157
6158 let fs = FakeFs::new(cx.background());
6159 fs.insert_tree(
6160 "/dir",
6161 json!({
6162 "a.ts": "a",
6163 }),
6164 )
6165 .await;
6166
6167 let project = Project::test(fs, cx);
6168 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6169
6170 let (tree, _) = project
6171 .update(cx, |project, cx| {
6172 project.find_or_create_local_worktree("/dir", true, cx)
6173 })
6174 .await
6175 .unwrap();
6176 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6177 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6178 .await;
6179
6180 let buffer = project
6181 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6182 .await
6183 .unwrap();
6184
6185 let fake_server = fake_language_servers.next().await.unwrap();
6186
6187 // The language server returns code actions that contain commands, not edits.
6188 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6189 fake_server
6190 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6191 Ok(Some(vec![
6192 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6193 title: "The code action".into(),
6194 command: Some(lsp::Command {
6195 title: "The command".into(),
6196 command: "_the/command".into(),
6197 arguments: Some(vec![json!("the-argument")]),
6198 }),
6199 ..Default::default()
6200 }),
6201 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6202 title: "two".into(),
6203 ..Default::default()
6204 }),
6205 ]))
6206 })
6207 .next()
6208 .await;
6209
6210 let action = actions.await.unwrap()[0].clone();
6211 let apply = project.update(cx, |project, cx| {
6212 project.apply_code_action(buffer.clone(), action, true, cx)
6213 });
6214
6215 // Resolving the code action does not populate its edits. In the absence of
6216 // edits, we must execute the given command.
6217 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6218 |action, _| async move { Ok(action) },
6219 );
6220
6221 // While executing the command, the language server sends the editor
6222 // a `workspace/applyEdit` request.
6223 fake_server
6224 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6225 let fake = fake_server.clone();
6226 move |params, _| {
6227 assert_eq!(params.command, "_the/command");
6228 let fake = fake.clone();
6229 async move {
6230 fake.server
6231 .request::<lsp::request::ApplyWorkspaceEdit>(
6232 lsp::ApplyWorkspaceEditParams {
6233 label: None,
6234 edit: lsp::WorkspaceEdit {
6235 changes: Some(
6236 [(
6237 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6238 vec![lsp::TextEdit {
6239 range: lsp::Range::new(
6240 lsp::Position::new(0, 0),
6241 lsp::Position::new(0, 0),
6242 ),
6243 new_text: "X".into(),
6244 }],
6245 )]
6246 .into_iter()
6247 .collect(),
6248 ),
6249 ..Default::default()
6250 },
6251 },
6252 )
6253 .await
6254 .unwrap();
6255 Ok(Some(json!(null)))
6256 }
6257 }
6258 })
6259 .next()
6260 .await;
6261
6262 // Applying the code action returns a project transaction containing the edits
6263 // sent by the language server in its `workspace/applyEdit` request.
6264 let transaction = apply.await.unwrap();
6265 assert!(transaction.0.contains_key(&buffer));
6266 buffer.update(cx, |buffer, cx| {
6267 assert_eq!(buffer.text(), "Xa");
6268 buffer.undo(cx);
6269 assert_eq!(buffer.text(), "a");
6270 });
6271 }
6272
6273 #[gpui::test]
6274 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6275 let fs = FakeFs::new(cx.background());
6276 fs.insert_tree(
6277 "/dir",
6278 json!({
6279 "file1": "the old contents",
6280 }),
6281 )
6282 .await;
6283
6284 let project = Project::test(fs.clone(), cx);
6285 let worktree_id = project
6286 .update(cx, |p, cx| {
6287 p.find_or_create_local_worktree("/dir", true, cx)
6288 })
6289 .await
6290 .unwrap()
6291 .0
6292 .read_with(cx, |tree, _| tree.id());
6293
6294 let buffer = project
6295 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6296 .await
6297 .unwrap();
6298 buffer
6299 .update(cx, |buffer, cx| {
6300 assert_eq!(buffer.text(), "the old contents");
6301 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6302 buffer.save(cx)
6303 })
6304 .await
6305 .unwrap();
6306
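        // The contents written to disk should match the buffer's text.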
6307 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6308 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6309 }
6310
6311 #[gpui::test]
6312 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6313 let fs = FakeFs::new(cx.background());
6314 fs.insert_tree(
6315 "/dir",
6316 json!({
6317 "file1": "the old contents",
6318 }),
6319 )
6320 .await;
6321
6322 let project = Project::test(fs.clone(), cx);
6323 let worktree_id = project
6324 .update(cx, |p, cx| {
6325 p.find_or_create_local_worktree("/dir/file1", true, cx)
6326 })
6327 .await
6328 .unwrap()
6329 .0
6330 .read_with(cx, |tree, _| tree.id());
6331
6332 let buffer = project
6333 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6334 .await
6335 .unwrap();
6336 buffer
6337 .update(cx, |buffer, cx| {
6338 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6339 buffer.save(cx)
6340 })
6341 .await
6342 .unwrap();
6343
6344 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6345 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6346 }
6347
6348 #[gpui::test]
6349 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6350 let fs = FakeFs::new(cx.background());
6351 fs.insert_tree("/dir", json!({})).await;
6352
6353 let project = Project::test(fs.clone(), cx);
6354 let (worktree, _) = project
6355 .update(cx, |project, cx| {
6356 project.find_or_create_local_worktree("/dir", true, cx)
6357 })
6358 .await
6359 .unwrap();
6360 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6361
6362 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6363 buffer.update(cx, |buffer, cx| {
6364 buffer.edit([0..0], "abc", cx);
6365 assert!(buffer.is_dirty());
6366 assert!(!buffer.has_conflict());
6367 });
6368 project
6369 .update(cx, |project, cx| {
6370 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6371 })
6372 .await
6373 .unwrap();
6374 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6375 buffer.read_with(cx, |buffer, cx| {
6376 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6377 assert!(!buffer.is_dirty());
6378 assert!(!buffer.has_conflict());
6379 });
6380
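        // Opening the buffer's new path yields the same buffer that was just saved.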
6381 let opened_buffer = project
6382 .update(cx, |project, cx| {
6383 project.open_buffer((worktree_id, "file1"), cx)
6384 })
6385 .await
6386 .unwrap();
6387 assert_eq!(opened_buffer, buffer);
6388 }
6389
6390 #[gpui::test(retries = 5)]
6391 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6392 let dir = temp_tree(json!({
6393 "a": {
6394 "file1": "",
6395 "file2": "",
6396 "file3": "",
6397 },
6398 "b": {
6399 "c": {
6400 "file4": "",
6401 "file5": "",
6402 }
6403 }
6404 }));
6405
6406 let project = Project::test(Arc::new(RealFs), cx);
6407 let rpc = project.read_with(cx, |p, _| p.client.clone());
6408
6409 let (tree, _) = project
6410 .update(cx, |p, cx| {
6411 p.find_or_create_local_worktree(dir.path(), true, cx)
6412 })
6413 .await
6414 .unwrap();
6415 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6416
6417 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6418 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6419 async move { buffer.await.unwrap() }
6420 };
6421 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6422 tree.read_with(cx, |tree, _| {
6423 tree.entry_for_path(path)
6424 .expect(&format!("no entry for path {}", path))
6425 .id
6426 })
6427 };
6428
6429 let buffer2 = buffer_for_path("a/file2", cx).await;
6430 let buffer3 = buffer_for_path("a/file3", cx).await;
6431 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6432 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6433
6434 let file2_id = id_for_path("a/file2", &cx);
6435 let file3_id = id_for_path("a/file3", &cx);
6436 let file4_id = id_for_path("b/c/file4", &cx);
6437
6438 // Wait for the initial scan.
6439 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6440 .await;
6441
6442 // Create a remote copy of this worktree.
6443 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6444 let (remote, load_task) = cx.update(|cx| {
6445 Worktree::remote(
6446 1,
6447 1,
6448 initial_snapshot.to_proto(&Default::default(), true),
6449 rpc.clone(),
6450 cx,
6451 )
6452 });
6453 load_task.await;
6454
6455 cx.read(|cx| {
6456 assert!(!buffer2.read(cx).is_dirty());
6457 assert!(!buffer3.read(cx).is_dirty());
6458 assert!(!buffer4.read(cx).is_dirty());
6459 assert!(!buffer5.read(cx).is_dirty());
6460 });
6461
6462 // Rename and delete files and directories.
6463 tree.flush_fs_events(&cx).await;
6464 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6465 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6466 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6467 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6468 tree.flush_fs_events(&cx).await;
6469
6470 let expected_paths = vec![
6471 "a",
6472 "a/file1",
6473 "a/file2.new",
6474 "b",
6475 "d",
6476 "d/file3",
6477 "d/file4",
6478 ];
6479
6480 cx.read(|app| {
6481 assert_eq!(
6482 tree.read(app)
6483 .paths()
6484 .map(|p| p.to_str().unwrap())
6485 .collect::<Vec<_>>(),
6486 expected_paths
6487 );
6488
6489 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6490 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6491 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6492
6493 assert_eq!(
6494 buffer2.read(app).file().unwrap().path().as_ref(),
6495 Path::new("a/file2.new")
6496 );
6497 assert_eq!(
6498 buffer3.read(app).file().unwrap().path().as_ref(),
6499 Path::new("d/file3")
6500 );
6501 assert_eq!(
6502 buffer4.read(app).file().unwrap().path().as_ref(),
6503 Path::new("d/file4")
6504 );
6505 assert_eq!(
6506 buffer5.read(app).file().unwrap().path().as_ref(),
6507 Path::new("b/c/file5")
6508 );
6509
6510 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6511 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6512 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6513 assert!(buffer5.read(app).file().unwrap().is_deleted());
6514 });
6515
6516 // Update the remote worktree. Check that it becomes consistent with the
6517 // local worktree.
6518 remote.update(cx, |remote, cx| {
6519 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6520 &initial_snapshot,
6521 1,
6522 1,
6523 true,
6524 );
6525 remote
6526 .as_remote_mut()
6527 .unwrap()
6528 .snapshot
6529 .apply_remote_update(update_message)
6530 .unwrap();
6531
6532 assert_eq!(
6533 remote
6534 .paths()
6535 .map(|p| p.to_str().unwrap())
6536 .collect::<Vec<_>>(),
6537 expected_paths
6538 );
6539 });
6540 }
6541
6542 #[gpui::test]
6543 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6544 let fs = FakeFs::new(cx.background());
6545 fs.insert_tree(
6546 "/the-dir",
6547 json!({
6548 "a.txt": "a-contents",
6549 "b.txt": "b-contents",
6550 }),
6551 )
6552 .await;
6553
6554 let project = Project::test(fs.clone(), cx);
6555 let worktree_id = project
6556 .update(cx, |p, cx| {
6557 p.find_or_create_local_worktree("/the-dir", true, cx)
6558 })
6559 .await
6560 .unwrap()
6561 .0
6562 .read_with(cx, |tree, _| tree.id());
6563
6564 // Spawn multiple tasks to open paths, repeating some paths.
6565 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6566 (
6567 p.open_buffer((worktree_id, "a.txt"), cx),
6568 p.open_buffer((worktree_id, "b.txt"), cx),
6569 p.open_buffer((worktree_id, "a.txt"), cx),
6570 )
6571 });
6572
6573 let buffer_a_1 = buffer_a_1.await.unwrap();
6574 let buffer_a_2 = buffer_a_2.await.unwrap();
6575 let buffer_b = buffer_b.await.unwrap();
6576 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6577 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6578
6579 // There is only one buffer per path.
6580 let buffer_a_id = buffer_a_1.id();
6581 assert_eq!(buffer_a_2.id(), buffer_a_id);
6582
6583 // Open the same path again while it is still open.
6584 drop(buffer_a_1);
6585 let buffer_a_3 = project
6586 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6587 .await
6588 .unwrap();
6589
6590 // There's still only one buffer per path.
6591 assert_eq!(buffer_a_3.id(), buffer_a_id);
6592 }
6593
6594 #[gpui::test]
6595 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6596 use std::fs;
6597
6598 let dir = temp_tree(json!({
6599 "file1": "abc",
6600 "file2": "def",
6601 "file3": "ghi",
6602 }));
6603
6604 let project = Project::test(Arc::new(RealFs), cx);
6605 let (worktree, _) = project
6606 .update(cx, |p, cx| {
6607 p.find_or_create_local_worktree(dir.path(), true, cx)
6608 })
6609 .await
6610 .unwrap();
6611 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6612
6613 worktree.flush_fs_events(&cx).await;
6614 worktree
6615 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6616 .await;
6617
6618 let buffer1 = project
6619 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6620 .await
6621 .unwrap();
6622 let events = Rc::new(RefCell::new(Vec::new()));
6623
6624 // initially, the buffer isn't dirty.
6625 buffer1.update(cx, |buffer, cx| {
6626 cx.subscribe(&buffer1, {
6627 let events = events.clone();
6628 move |_, _, event, _| match event {
6629 BufferEvent::Operation(_) => {}
6630 _ => events.borrow_mut().push(event.clone()),
6631 }
6632 })
6633 .detach();
6634
6635 assert!(!buffer.is_dirty());
6636 assert!(events.borrow().is_empty());
6637
6638 buffer.edit(vec![1..2], "", cx);
6639 });
6640
6641 // after the first edit, the buffer is dirty, and emits a dirtied event.
6642 buffer1.update(cx, |buffer, cx| {
6643 assert_eq!(buffer.text(), "ac");
6644 assert!(buffer.is_dirty());
6645 assert_eq!(
6646 *events.borrow(),
6647 &[language::Event::Edited, language::Event::Dirtied]
6648 );
6649 events.borrow_mut().clear();
6650 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6651 });
6652
6653 // after saving, the buffer is not dirty, and emits a saved event.
6654 buffer1.update(cx, |buffer, cx| {
6655 assert!(!buffer.is_dirty());
6656 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6657 events.borrow_mut().clear();
6658
6659 buffer.edit(vec![1..1], "B", cx);
6660 buffer.edit(vec![2..2], "D", cx);
6661 });
6662
6663 // after editing again, the buffer is dirty, and emits another dirty event.
6664 buffer1.update(cx, |buffer, cx| {
6665 assert_eq!(buffer.text(), "aBDc");
6666 assert!(buffer.is_dirty());
6667 assert_eq!(
6668 *events.borrow(),
6669 &[
6670 language::Event::Edited,
6671 language::Event::Dirtied,
6672 language::Event::Edited,
6673 ],
6674 );
6675 events.borrow_mut().clear();
6676
6677 // TODO - currently, after restoring the buffer to its
6678 // previously-saved state, the buffer is still considered dirty.
6679 buffer.edit([1..3], "", cx);
6680 assert_eq!(buffer.text(), "ac");
6681 assert!(buffer.is_dirty());
6682 });
6683
6684 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6685
6686 // When a file is deleted, the buffer is considered dirty.
6687 let events = Rc::new(RefCell::new(Vec::new()));
6688 let buffer2 = project
6689 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6690 .await
6691 .unwrap();
6692 buffer2.update(cx, |_, cx| {
6693 cx.subscribe(&buffer2, {
6694 let events = events.clone();
6695 move |_, _, event, _| events.borrow_mut().push(event.clone())
6696 })
6697 .detach();
6698 });
6699
6700 fs::remove_file(dir.path().join("file2")).unwrap();
6701 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6702 assert_eq!(
6703 *events.borrow(),
6704 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6705 );
6706
6707 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6708 let events = Rc::new(RefCell::new(Vec::new()));
6709 let buffer3 = project
6710 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6711 .await
6712 .unwrap();
6713 buffer3.update(cx, |_, cx| {
6714 cx.subscribe(&buffer3, {
6715 let events = events.clone();
6716 move |_, _, event, _| events.borrow_mut().push(event.clone())
6717 })
6718 .detach();
6719 });
6720
6721 worktree.flush_fs_events(&cx).await;
6722 buffer3.update(cx, |buffer, cx| {
6723 buffer.edit(Some(0..0), "x", cx);
6724 });
6725 events.borrow_mut().clear();
6726 fs::remove_file(dir.path().join("file3")).unwrap();
6727 buffer3
6728 .condition(&cx, |_, _| !events.borrow().is_empty())
6729 .await;
6730 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6731 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6732 }
6733
6734 #[gpui::test]
6735 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6736 use std::fs;
6737
6738 let initial_contents = "aaa\nbbbbb\nc\n";
6739 let dir = temp_tree(json!({ "the-file": initial_contents }));
6740
6741 let project = Project::test(Arc::new(RealFs), cx);
6742 let (worktree, _) = project
6743 .update(cx, |p, cx| {
6744 p.find_or_create_local_worktree(dir.path(), true, cx)
6745 })
6746 .await
6747 .unwrap();
6748 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6749
6750 worktree
6751 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6752 .await;
6753
6754 let abs_path = dir.path().join("the-file");
6755 let buffer = project
6756 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6757 .await
6758 .unwrap();
6759
6760 // TODO
6761 // Add a cursor on each row.
6762 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6763 // assert!(!buffer.is_dirty());
6764 // buffer.add_selection_set(
6765 // &(0..3)
6766 // .map(|row| Selection {
6767 // id: row as usize,
6768 // start: Point::new(row, 1),
6769 // end: Point::new(row, 1),
6770 // reversed: false,
6771 // goal: SelectionGoal::None,
6772 // })
6773 // .collect::<Vec<_>>(),
6774 // cx,
6775 // )
6776 // });
6777
6778 // Change the file on disk, adding two new lines of text, and removing
6779 // one line.
6780 buffer.read_with(cx, |buffer, _| {
6781 assert!(!buffer.is_dirty());
6782 assert!(!buffer.has_conflict());
6783 });
6784 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6785 fs::write(&abs_path, new_contents).unwrap();
6786
6787 // Because the buffer was not modified, it is reloaded from disk. Its
6788 // contents are edited according to the diff between the old and new
6789 // file contents.
6790 buffer
6791 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6792 .await;
6793
6794 buffer.update(cx, |buffer, _| {
6795 assert_eq!(buffer.text(), new_contents);
6796 assert!(!buffer.is_dirty());
6797 assert!(!buffer.has_conflict());
6798
6799 // TODO
6800 // let cursor_positions = buffer
6801 // .selection_set(selection_set_id)
6802 // .unwrap()
6803 // .selections::<Point>(&*buffer)
6804 // .map(|selection| {
6805 // assert_eq!(selection.start, selection.end);
6806 // selection.start
6807 // })
6808 // .collect::<Vec<_>>();
6809 // assert_eq!(
6810 // cursor_positions,
6811 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6812 // );
6813 });
6814
6815 // Modify the buffer.
6816 buffer.update(cx, |buffer, cx| {
6817 buffer.edit(vec![0..0], " ", cx);
6818 assert!(buffer.is_dirty());
6819 assert!(!buffer.has_conflict());
6820 });
6821
6822 // Change the file on disk again, adding blank lines to the beginning.
6823 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6824
6825 // Because the buffer is modified, it doesn't reload from disk, but is
6826 // marked as having a conflict.
6827 buffer
6828 .condition(&cx, |buffer, _| buffer.has_conflict())
6829 .await;
6830 }
6831
6832 #[gpui::test]
6833 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6834 cx.foreground().forbid_parking();
6835
6836 let fs = FakeFs::new(cx.background());
6837 fs.insert_tree(
6838 "/the-dir",
6839 json!({
6840 "a.rs": "
6841 fn foo(mut v: Vec<usize>) {
6842 for x in &v {
6843 v.push(1);
6844 }
6845 }
6846 "
6847 .unindent(),
6848 }),
6849 )
6850 .await;
6851
6852 let project = Project::test(fs.clone(), cx);
6853 let (worktree, _) = project
6854 .update(cx, |p, cx| {
6855 p.find_or_create_local_worktree("/the-dir", true, cx)
6856 })
6857 .await
6858 .unwrap();
6859 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6860
6861 let buffer = project
6862 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6863 .await
6864 .unwrap();
6865
6866 let buffer_uri = lsp::Url::from_file_path("/the-dir/a.rs").unwrap();
6867 let message = lsp::PublishDiagnosticsParams {
6868 uri: buffer_uri.clone(),
6869 diagnostics: vec![
6870 lsp::Diagnostic {
6871 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6872 severity: Some(DiagnosticSeverity::WARNING),
6873 message: "error 1".to_string(),
6874 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6875 location: lsp::Location {
6876 uri: buffer_uri.clone(),
6877 range: lsp::Range::new(
6878 lsp::Position::new(1, 8),
6879 lsp::Position::new(1, 9),
6880 ),
6881 },
6882 message: "error 1 hint 1".to_string(),
6883 }]),
6884 ..Default::default()
6885 },
6886 lsp::Diagnostic {
6887 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6888 severity: Some(DiagnosticSeverity::HINT),
6889 message: "error 1 hint 1".to_string(),
6890 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6891 location: lsp::Location {
6892 uri: buffer_uri.clone(),
6893 range: lsp::Range::new(
6894 lsp::Position::new(1, 8),
6895 lsp::Position::new(1, 9),
6896 ),
6897 },
6898 message: "original diagnostic".to_string(),
6899 }]),
6900 ..Default::default()
6901 },
6902 lsp::Diagnostic {
6903 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6904 severity: Some(DiagnosticSeverity::ERROR),
6905 message: "error 2".to_string(),
6906 related_information: Some(vec![
6907 lsp::DiagnosticRelatedInformation {
6908 location: lsp::Location {
6909 uri: buffer_uri.clone(),
6910 range: lsp::Range::new(
6911 lsp::Position::new(1, 13),
6912 lsp::Position::new(1, 15),
6913 ),
6914 },
6915 message: "error 2 hint 1".to_string(),
6916 },
6917 lsp::DiagnosticRelatedInformation {
6918 location: lsp::Location {
6919 uri: buffer_uri.clone(),
6920 range: lsp::Range::new(
6921 lsp::Position::new(1, 13),
6922 lsp::Position::new(1, 15),
6923 ),
6924 },
6925 message: "error 2 hint 2".to_string(),
6926 },
6927 ]),
6928 ..Default::default()
6929 },
6930 lsp::Diagnostic {
6931 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6932 severity: Some(DiagnosticSeverity::HINT),
6933 message: "error 2 hint 1".to_string(),
6934 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6935 location: lsp::Location {
6936 uri: buffer_uri.clone(),
6937 range: lsp::Range::new(
6938 lsp::Position::new(2, 8),
6939 lsp::Position::new(2, 17),
6940 ),
6941 },
6942 message: "original diagnostic".to_string(),
6943 }]),
6944 ..Default::default()
6945 },
6946 lsp::Diagnostic {
6947 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6948 severity: Some(DiagnosticSeverity::HINT),
6949 message: "error 2 hint 2".to_string(),
6950 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6951 location: lsp::Location {
6952 uri: buffer_uri.clone(),
6953 range: lsp::Range::new(
6954 lsp::Position::new(2, 8),
6955 lsp::Position::new(2, 17),
6956 ),
6957 },
6958 message: "original diagnostic".to_string(),
6959 }]),
6960 ..Default::default()
6961 },
6962 ],
6963 version: None,
6964 };
6965
6966 project
6967 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
6968 .unwrap();
6969 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6970
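        // The diagnostics are grouped via their related information: "error 1" and its hint
        // share group 0, while "error 2" and its two hints share group 1.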
6971 assert_eq!(
6972 buffer
6973 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6974 .collect::<Vec<_>>(),
6975 &[
6976 DiagnosticEntry {
6977 range: Point::new(1, 8)..Point::new(1, 9),
6978 diagnostic: Diagnostic {
6979 severity: DiagnosticSeverity::WARNING,
6980 message: "error 1".to_string(),
6981 group_id: 0,
6982 is_primary: true,
6983 ..Default::default()
6984 }
6985 },
6986 DiagnosticEntry {
6987 range: Point::new(1, 8)..Point::new(1, 9),
6988 diagnostic: Diagnostic {
6989 severity: DiagnosticSeverity::HINT,
6990 message: "error 1 hint 1".to_string(),
6991 group_id: 0,
6992 is_primary: false,
6993 ..Default::default()
6994 }
6995 },
6996 DiagnosticEntry {
6997 range: Point::new(1, 13)..Point::new(1, 15),
6998 diagnostic: Diagnostic {
6999 severity: DiagnosticSeverity::HINT,
7000 message: "error 2 hint 1".to_string(),
7001 group_id: 1,
7002 is_primary: false,
7003 ..Default::default()
7004 }
7005 },
7006 DiagnosticEntry {
7007 range: Point::new(1, 13)..Point::new(1, 15),
7008 diagnostic: Diagnostic {
7009 severity: DiagnosticSeverity::HINT,
7010 message: "error 2 hint 2".to_string(),
7011 group_id: 1,
7012 is_primary: false,
7013 ..Default::default()
7014 }
7015 },
7016 DiagnosticEntry {
7017 range: Point::new(2, 8)..Point::new(2, 17),
7018 diagnostic: Diagnostic {
7019 severity: DiagnosticSeverity::ERROR,
7020 message: "error 2".to_string(),
7021 group_id: 1,
7022 is_primary: true,
7023 ..Default::default()
7024 }
7025 }
7026 ]
7027 );
7028
7029 assert_eq!(
7030 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7031 &[
7032 DiagnosticEntry {
7033 range: Point::new(1, 8)..Point::new(1, 9),
7034 diagnostic: Diagnostic {
7035 severity: DiagnosticSeverity::WARNING,
7036 message: "error 1".to_string(),
7037 group_id: 0,
7038 is_primary: true,
7039 ..Default::default()
7040 }
7041 },
7042 DiagnosticEntry {
7043 range: Point::new(1, 8)..Point::new(1, 9),
7044 diagnostic: Diagnostic {
7045 severity: DiagnosticSeverity::HINT,
7046 message: "error 1 hint 1".to_string(),
7047 group_id: 0,
7048 is_primary: false,
7049 ..Default::default()
7050 }
7051 },
7052 ]
7053 );
7054 assert_eq!(
7055 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7056 &[
7057 DiagnosticEntry {
7058 range: Point::new(1, 13)..Point::new(1, 15),
7059 diagnostic: Diagnostic {
7060 severity: DiagnosticSeverity::HINT,
7061 message: "error 2 hint 1".to_string(),
7062 group_id: 1,
7063 is_primary: false,
7064 ..Default::default()
7065 }
7066 },
7067 DiagnosticEntry {
7068 range: Point::new(1, 13)..Point::new(1, 15),
7069 diagnostic: Diagnostic {
7070 severity: DiagnosticSeverity::HINT,
7071 message: "error 2 hint 2".to_string(),
7072 group_id: 1,
7073 is_primary: false,
7074 ..Default::default()
7075 }
7076 },
7077 DiagnosticEntry {
7078 range: Point::new(2, 8)..Point::new(2, 17),
7079 diagnostic: Diagnostic {
7080 severity: DiagnosticSeverity::ERROR,
7081 message: "error 2".to_string(),
7082 group_id: 1,
7083 is_primary: true,
7084 ..Default::default()
7085 }
7086 }
7087 ]
7088 );
7089 }
7090
7091 #[gpui::test]
7092 async fn test_rename(cx: &mut gpui::TestAppContext) {
7093 cx.foreground().forbid_parking();
7094
7095 let mut language = Language::new(
7096 LanguageConfig {
7097 name: "Rust".into(),
7098 path_suffixes: vec!["rs".to_string()],
7099 ..Default::default()
7100 },
7101 Some(tree_sitter_rust::language()),
7102 );
7103 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7104
7105 let fs = FakeFs::new(cx.background());
7106 fs.insert_tree(
7107 "/dir",
7108 json!({
7109 "one.rs": "const ONE: usize = 1;",
7110 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7111 }),
7112 )
7113 .await;
7114
7115 let project = Project::test(fs.clone(), cx);
7116 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7117
7118 let (tree, _) = project
7119 .update(cx, |project, cx| {
7120 project.find_or_create_local_worktree("/dir", true, cx)
7121 })
7122 .await
7123 .unwrap();
7124 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7125 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7126 .await;
7127
7128 let buffer = project
7129 .update(cx, |project, cx| {
7130 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7131 })
7132 .await
7133 .unwrap();
7134
7135 let fake_server = fake_servers.next().await.unwrap();
7136
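        // Prepare the rename; the server responds with the range of the symbol that can be
        // renamed.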
7137 let response = project.update(cx, |project, cx| {
7138 project.prepare_rename(buffer.clone(), 7, cx)
7139 });
7140 fake_server
7141 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7142 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7143 assert_eq!(params.position, lsp::Position::new(0, 7));
7144 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7145 lsp::Position::new(0, 6),
7146 lsp::Position::new(0, 9),
7147 ))))
7148 })
7149 .next()
7150 .await
7151 .unwrap();
7152 let range = response.await.unwrap().unwrap();
7153 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7154 assert_eq!(range, 6..9);
7155
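        // Perform the rename. The server's workspace edit touches both files, so the resulting
        // project transaction contains two buffers.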
7156 let response = project.update(cx, |project, cx| {
7157 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7158 });
7159 fake_server
7160 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7161 assert_eq!(
7162 params.text_document_position.text_document.uri.as_str(),
7163 "file:///dir/one.rs"
7164 );
7165 assert_eq!(
7166 params.text_document_position.position,
7167 lsp::Position::new(0, 7)
7168 );
7169 assert_eq!(params.new_name, "THREE");
7170 Ok(Some(lsp::WorkspaceEdit {
7171 changes: Some(
7172 [
7173 (
7174 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7175 vec![lsp::TextEdit::new(
7176 lsp::Range::new(
7177 lsp::Position::new(0, 6),
7178 lsp::Position::new(0, 9),
7179 ),
7180 "THREE".to_string(),
7181 )],
7182 ),
7183 (
7184 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7185 vec![
7186 lsp::TextEdit::new(
7187 lsp::Range::new(
7188 lsp::Position::new(0, 24),
7189 lsp::Position::new(0, 27),
7190 ),
7191 "THREE".to_string(),
7192 ),
7193 lsp::TextEdit::new(
7194 lsp::Range::new(
7195 lsp::Position::new(0, 35),
7196 lsp::Position::new(0, 38),
7197 ),
7198 "THREE".to_string(),
7199 ),
7200 ],
7201 ),
7202 ]
7203 .into_iter()
7204 .collect(),
7205 ),
7206 ..Default::default()
7207 }))
7208 })
7209 .next()
7210 .await
7211 .unwrap();
7212 let mut transaction = response.await.unwrap().0;
7213 assert_eq!(transaction.len(), 2);
7214 assert_eq!(
7215 transaction
7216 .remove_entry(&buffer)
7217 .unwrap()
7218 .0
7219 .read_with(cx, |buffer, _| buffer.text()),
7220 "const THREE: usize = 1;"
7221 );
7222 assert_eq!(
7223 transaction
7224 .into_keys()
7225 .next()
7226 .unwrap()
7227 .read_with(cx, |buffer, _| buffer.text()),
7228 "const TWO: usize = one::THREE + one::THREE;"
7229 );
7230 }
7231
7232 #[gpui::test]
7233 async fn test_search(cx: &mut gpui::TestAppContext) {
7234 let fs = FakeFs::new(cx.background());
7235 fs.insert_tree(
7236 "/dir",
7237 json!({
7238 "one.rs": "const ONE: usize = 1;",
7239 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7240 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7241 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7242 }),
7243 )
7244 .await;
7245 let project = Project::test(fs.clone(), cx);
7246 let (tree, _) = project
7247 .update(cx, |project, cx| {
7248 project.find_or_create_local_worktree("/dir", true, cx)
7249 })
7250 .await
7251 .unwrap();
7252 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7253 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7254 .await;
7255
7256 assert_eq!(
7257 search(&project, SearchQuery::text("TWO", false, true), cx)
7258 .await
7259 .unwrap(),
7260 HashMap::from_iter([
7261 ("two.rs".to_string(), vec![6..9]),
7262 ("three.rs".to_string(), vec![37..40])
7263 ])
7264 );
7265
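        // Open one of the files and edit it, so that the next search reflects the unsaved,
        // in-memory contents.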
7266 let buffer_4 = project
7267 .update(cx, |project, cx| {
7268 project.open_buffer((worktree_id, "four.rs"), cx)
7269 })
7270 .await
7271 .unwrap();
7272 buffer_4.update(cx, |buffer, cx| {
7273 buffer.edit([20..28, 31..43], "two::TWO", cx);
7274 });
7275
7276 assert_eq!(
7277 search(&project, SearchQuery::text("TWO", false, true), cx)
7278 .await
7279 .unwrap(),
7280 HashMap::from_iter([
7281 ("two.rs".to_string(), vec![6..9]),
7282 ("three.rs".to_string(), vec![37..40]),
7283 ("four.rs".to_string(), vec![25..28, 36..39])
7284 ])
7285 );
7286
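        // Runs a project-wide search and returns the matched offset ranges, keyed by file path.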
7287 async fn search(
7288 project: &ModelHandle<Project>,
7289 query: SearchQuery,
7290 cx: &mut gpui::TestAppContext,
7291 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7292 let results = project
7293 .update(cx, |project, cx| project.search(query, cx))
7294 .await?;
7295
7296 Ok(results
7297 .into_iter()
7298 .map(|(buffer, ranges)| {
7299 buffer.read_with(cx, |buffer, _| {
7300 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7301 let ranges = ranges
7302 .into_iter()
7303 .map(|range| range.to_offset(buffer))
7304 .collect::<Vec<_>>();
7305 (path, ranges)
7306 })
7307 })
7308 .collect())
7309 }
7310 }
7311}