1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use sha2::{Digest, Sha256};
32use similar::{ChangeTag, TextDiff};
33use std::{
34 cell::RefCell,
35 cmp::{self, Ordering},
36 convert::TryInto,
37 hash::Hash,
38 mem,
39 ops::Range,
40 path::{Component, Path, PathBuf},
41 rc::Rc,
42 sync::{
43 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
44 Arc,
45 },
46 time::Instant,
47};
48use util::{post_inc, ResultExt, TryFutureExt as _};
49
50pub use fs::*;
51pub use worktree::*;
52
/// An item that can live inside a project (e.g. a buffer) and can report
/// which project entry it corresponds to, if any.
pub trait Item: Entity {
    /// Returns the id of the project entry backing this item, or `None` if
    /// the item is not associated with an entry (e.g. an untitled buffer).
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
56
/// The central model of this crate: a collection of worktrees plus the
/// collaboration, buffer, and language-server state attached to them.
/// A project is either hosted locally or is a remote (guest) view of a
/// project shared by another peer — see `client_state`.
pub struct Project {
    // Strong or weak handles to worktrees; weak handles may fail to upgrade
    // once the worktree is dropped elsewhere (see `worktrees()`).
    worktrees: Vec<WorktreeHandle>,
    // The most recently activated project entry, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by (worktree, server name).
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Per-server status (name + pending progress), keyed by server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter used to allocate `ProjectEntryId`s.
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is local or a remote guest view, plus the
    // associated sharing state.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Count of language servers that currently have a diagnostics pass running.
    language_servers_with_diagnostics_running: isize,
    // Watch channel that fires whenever a buffer is opened/registered.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids shared with each remote peer — presumably used to avoid
    // re-sending buffers; cleared on unshare. TODO(review): confirm.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Buffers currently being loaded, keyed by path. Concurrent opens of the
    // same path wait on the watch receiver (see `open_buffer`).
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Local worktrees currently being created, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by their remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Versioned text snapshots per buffer id.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value generated at construction; identifies this instance.
    nonce: u128,
}
89
/// The state of a buffer slot in `Project::opened_buffers`.
enum OpenBuffer {
    /// The project keeps the buffer alive (used while shared or remote).
    Strong(ModelHandle<Buffer>),
    /// The buffer may be dropped when no one else references it.
    Weak(WeakModelHandle<Buffer>),
    /// The buffer is still loading; operations received in the meantime are
    /// queued here and applied once the buffer is registered.
    Loading(Vec<Operation>),
}
95
/// A handle to a worktree that is either kept alive by the project (strong)
/// or allowed to drop when unreferenced elsewhere (weak).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
100
/// Whether this project is hosted locally or is a guest view of a remote
/// project, together with the per-mode collaboration state.
enum ProjectClientState {
    Local {
        /// True while the project is shared with guests.
        is_shared: bool,
        /// Sender/receiver for the server-assigned project id (set on
        /// registration, cleared on unregistration).
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that (re-)registers the project as the client
        /// connection status changes.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing; makes the project read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Background task that flips `sharing_has_stopped` when the
        /// connection drops.
        _detect_unshare_task: Task<Option<()>>,
    },
}
115
/// Another user participating in this project, identified by their
/// connection (`peer_id`) and assigned a CRDT replica id.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
122
/// Events emitted by `Project` for observers (e.g. UI components).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    /// The server-assigned project id changed (set on register, `None` on
    /// unregister).
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
134
/// Status of a single language server, as tracked per server id.
pub struct LanguageServerStatus {
    pub name: String,
    /// In-progress work items keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Number of diagnostic updates still expected from this server.
    pending_diagnostic_updates: isize,
}
140
/// A single unit of in-progress language-server work (LSP progress report).
#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    /// Completion percentage, when the server reports one.
    pub percentage: Option<usize>,
    pub last_update_at: Instant,
}
147
/// A path within the project: a worktree plus a path relative to that
/// worktree's root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
153
/// Counts of primary diagnostics grouped by severity.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
161
/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
167
/// A highlighted range in a document (LSP document highlight), along with
/// the kind of highlight (text/read/write).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
173
/// A project-wide symbol as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    /// The worktree containing the symbol itself.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    /// Display label (styled text) for the symbol.
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte signature — presumably a SHA-256 digest used to validate the
    // symbol when resolving it later (Sha256 is imported above). TODO confirm.
    pub signature: [u8; 32],
}
186
/// A set of edits spanning multiple buffers, mapping each affected buffer to
/// the transaction applied to it (e.g. the result of a workspace edit).
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
/// Unique identifier for an entry (file or directory) within the project's
/// worktrees, allocated from a shared monotonically increasing counter.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Allocates the next id by atomically bumping `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        ProjectEntryId(counter.fetch_add(1, SeqCst))
    }

    /// Reconstructs an id from its wire (protobuf) representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts the id into its wire (protobuf) representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw integer value of the id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
245
246impl Project {
    /// Registers all of `Project`'s RPC message and request handlers on the
    /// given client. Must be called once at startup before projects can
    /// respond to collaboration traffic.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget messages.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, parameterized by type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
278
    /// Creates a new, empty local project. A background task keeps the
    /// project registered with the server for as long as the client is
    /// connected, and unregisters it on disconnect.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Track the client's connection status and (re-)register or
            // unregister the project accordingly. Held weakly so the task
            // doesn't keep the project alive.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random instance identifier.
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
342
    /// Joins a remote project as a guest: connects, requests `JoinProject`,
    /// builds remote worktrees from the response, and loads collaborator
    /// metadata. A background task marks the project read-only if the
    /// connection drops or the host stops sharing.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Construct remote worktrees; their load tasks run in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection: any drop (initial or subsequent)
                    // means the project is no longer shared with us.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the join response.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Resolve collaborator user records before building the map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
455
    /// Builds a local project wired to a fake HTTP client (all requests 404)
    /// for use in tests.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
464
465 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
466 self.opened_buffers
467 .get(&remote_id)
468 .and_then(|buffer| buffer.upgrade(cx))
469 }
470
    /// Returns the project's language registry (test-only accessor).
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
475
    /// Test-only sanity checks: local projects must not have two worktrees
    /// rooted at the same absolute path; guest projects must have no buffers
    /// with deferred (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            // Every worktree's absolute path must be unique.
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            // As a guest, all received operations should have been applied.
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
509
510 #[cfg(any(test, feature = "test-support"))]
511 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
512 let path = path.into();
513 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
514 self.opened_buffers.iter().any(|(_, buffer)| {
515 if let Some(buffer) = buffer.upgrade(cx) {
516 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
517 if file.worktree == worktree && file.path() == &path.path {
518 return true;
519 }
520 }
521 }
522 false
523 })
524 } else {
525 false
526 }
527 }
528
    /// Returns the filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
532
    /// Unregisters the project from the server: stops sharing, unregisters
    /// every local worktree, clears the stored remote id, and drops all RPC
    /// subscriptions.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    // Only valid for local projects: worktrees must be local.
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
549
    /// Registers this project with the server, obtaining a fresh project id,
    /// then registers each local worktree under that id. Any previous
    /// registration is torn down first.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                // Route incoming messages for this remote id to this model.
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                // Kick off worktree registrations; awaited below, outside the
                // update closure.
                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
582
583 pub fn remote_id(&self) -> Option<u64> {
584 match &self.client_state {
585 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
586 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
587 }
588 }
589
    /// Returns a future resolving to the project's remote id, waiting for
    /// registration to complete if necessary. For remote projects it resolves
    /// immediately.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        // Capture either the known id (remote) or a watch on the id (local)
        // before entering the async block, so the future doesn't borrow self.
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id is assigned.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
612
613 pub fn replica_id(&self) -> ReplicaId {
614 match &self.client_state {
615 ProjectClientState::Local { .. } => 0,
616 ProjectClientState::Remote { replica_id, .. } => *replica_id,
617 }
618 }
619
    /// Returns the current collaborators, keyed by their peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
623
    /// Iterates over all live worktrees, silently skipping weak handles whose
    /// worktree has already been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
632
633 pub fn visible_worktrees<'a>(
634 &'a self,
635 cx: &'a AppContext,
636 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
637 self.worktrees.iter().filter_map(|worktree| {
638 worktree.upgrade(cx).and_then(|worktree| {
639 if worktree.read(cx).is_visible() {
640 Some(worktree)
641 } else {
642 None
643 }
644 })
645 })
646 }
647
648 pub fn worktree_for_id(
649 &self,
650 id: WorktreeId,
651 cx: &AppContext,
652 ) -> Option<ModelHandle<Worktree>> {
653 self.worktrees(cx)
654 .find(|worktree| worktree.read(cx).id() == id)
655 }
656
657 pub fn worktree_for_entry(
658 &self,
659 entry_id: ProjectEntryId,
660 cx: &AppContext,
661 ) -> Option<ModelHandle<Worktree>> {
662 self.worktrees(cx)
663 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
664 }
665
666 pub fn worktree_id_for_entry(
667 &self,
668 entry_id: ProjectEntryId,
669 cx: &AppContext,
670 ) -> Option<WorktreeId> {
671 self.worktree_for_entry(entry_id, cx)
672 .map(|worktree| worktree.read(cx).id())
673 }
674
    /// Shares this local project with guests: upgrades buffer and worktree
    /// handles to strong (so they stay alive for guests), sends the
    /// `ShareProject` request, then shares each worktree. Fails for remote
    /// projects or if the project has not been registered yet.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Pin all open buffers so they can't be dropped while
                    // guests reference them.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // Loading entries only exist on guest projects.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise pin all worktrees.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share every worktree under the project id, awaiting outside the
            // update closure.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
736
    /// Stops sharing this local project: clears collaborator/shared-buffer
    /// state, downgrades worktree and buffer handles back to weak (so they
    /// can be released), and notifies the server. No-op if not shared; logs
    /// an error if called on a remote project.
    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
        let rpc = self.client.clone();

        if let ProjectClientState::Local {
            is_shared,
            remote_id_rx,
            ..
        } = &mut self.client_state
        {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    // Visible worktrees stay strongly held; invisible ones
                    // may now be dropped when unreferenced.
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            // Buffers no longer need to be pinned for guests.
            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            if let Some(project_id) = *remote_id_rx.borrow() {
                // Best-effort notification; failure is only logged.
                rpc.send(proto::UnshareProject { project_id }).log_err();
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }
783
    /// Called on a guest project when the host stops sharing (or the
    /// connection drops): marks the project read-only and clears the
    /// collaborator list. No-op for local projects.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
795
796 pub fn is_read_only(&self) -> bool {
797 match &self.client_state {
798 ProjectClientState::Local { .. } => false,
799 ProjectClientState::Remote {
800 sharing_has_stopped,
801 ..
802 } => *sharing_has_stopped,
803 }
804 }
805
806 pub fn is_local(&self) -> bool {
807 match &self.client_state {
808 ProjectClientState::Local { .. } => true,
809 ProjectClientState::Remote { .. } => false,
810 }
811 }
812
    /// Reports whether this project is a guest view of a remote project.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
816
817 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
818 if self.is_remote() {
819 return Err(anyhow!("creating buffers as a guest is not supported yet"));
820 }
821
822 let buffer = cx.add_model(|cx| {
823 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
824 });
825 self.register_buffer(&buffer, cx)?;
826 Ok(buffer)
827 }
828
    /// Opens the buffer at the given project path and returns it together
    /// with the id of its project entry. Fails if the buffer has no backing
    /// project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
845
    /// Opens the buffer for the given project path, deduplicating concurrent
    /// requests: an already-open buffer is returned immediately, an in-flight
    /// load is awaited via its watch channel, and otherwise a fresh load is
    /// started (locally or over RPC depending on the worktree).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to all waiters.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch carries a result, then clone it out.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
906
    /// Loads a buffer from a local worktree and registers it with the
    /// project once loaded.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
923
    /// Requests a buffer from the host over RPC (`OpenBufferByPath`) and
    /// deserializes the response into a local buffer model.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Only called on remote projects, which always have a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
948
    /// Opens a buffer for a file URI received from a language server. If the
    /// file lies outside all existing worktrees, an invisible worktree is
    /// created for it and associated with the originating language server.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create a hidden one rooted
                // at the file and register the server against it.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                // The worktree root *is* the file, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
987
    /// Opens a buffer by its remote id. Locally this only succeeds for
    /// already-open buffers; guests fall back to requesting the buffer from
    /// the host over RPC.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1013
    /// Saves a buffer to a new absolute path, creating (or reusing) a local
    /// worktree containing that path, then re-detects the buffer's language
    /// and re-registers it with the appropriate language server.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The path (and thus possibly the language) changed, so refresh
            // language assignment and language-server registration.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1038
1039 pub fn get_open_buffer(
1040 &mut self,
1041 path: &ProjectPath,
1042 cx: &mut ModelContext<Self>,
1043 ) -> Option<ModelHandle<Buffer>> {
1044 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1045 self.opened_buffers.values().find_map(|buffer| {
1046 let buffer = buffer.upgrade(cx)?;
1047 let file = File::from_dyn(buffer.read(cx).file())?;
1048 if file.worktree == worktree && file.path() == &path.path {
1049 Some(buffer)
1050 } else {
1051 None
1052 }
1053 })
1054 }
1055
    /// Registers a buffer with the project: records it in `opened_buffers`
    /// (strongly if shared/remote, weakly otherwise), applies any operations
    /// queued while it was loading, subscribes to its events, and assigns a
    /// language and language server. Fails if a live buffer with the same
    /// remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Shared/remote projects must keep buffers alive for collaborators.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived while the buffer was loading are
            // applied now that the buffer exists.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle is fine to replace; a live one is an error.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
1096
1097 fn register_buffer_with_language_server(
1098 &mut self,
1099 buffer_handle: &ModelHandle<Buffer>,
1100 cx: &mut ModelContext<Self>,
1101 ) {
1102 let buffer = buffer_handle.read(cx);
1103 let buffer_id = buffer.remote_id();
1104 if let Some(file) = File::from_dyn(buffer.file()) {
1105 if file.is_local() {
1106 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1107 let initial_snapshot = buffer.text_snapshot();
1108 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1109
1110 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1111 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1112 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1113 .log_err();
1114 }
1115 }
1116
1117 if let Some((_, server)) = language_server {
1118 server
1119 .notify::<lsp::notification::DidOpenTextDocument>(
1120 lsp::DidOpenTextDocumentParams {
1121 text_document: lsp::TextDocumentItem::new(
1122 uri,
1123 Default::default(),
1124 0,
1125 initial_snapshot.text(),
1126 ),
1127 }
1128 .clone(),
1129 )
1130 .log_err();
1131 buffer_handle.update(cx, |buffer, cx| {
1132 buffer.set_completion_triggers(
1133 server
1134 .capabilities()
1135 .completion_provider
1136 .as_ref()
1137 .and_then(|provider| provider.trigger_characters.clone())
1138 .unwrap_or(Vec::new()),
1139 cx,
1140 )
1141 });
1142 self.buffer_snapshots
1143 .insert(buffer_id, vec![(0, initial_snapshot)]);
1144 }
1145
1146 cx.observe_release(buffer_handle, |this, buffer, cx| {
1147 if let Some(file) = File::from_dyn(buffer.file()) {
1148 if file.is_local() {
1149 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1150 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1151 server
1152 .notify::<lsp::notification::DidCloseTextDocument>(
1153 lsp::DidCloseTextDocumentParams {
1154 text_document: lsp::TextDocumentIdentifier::new(
1155 uri.clone(),
1156 ),
1157 },
1158 )
1159 .log_err();
1160 }
1161 }
1162 }
1163 })
1164 .detach();
1165 }
1166 }
1167 }
1168
    /// Reacts to events emitted by an open buffer.
    ///
    /// - `Operation`: forwards the buffer operation to collaborators when
    ///   the project is shared (has a remote id).
    /// - `Edited`: computes the edits since the last snapshot we reported to
    ///   the buffer's language server, sends them as an incremental
    ///   `textDocument/didChange`, and records the new (version, snapshot).
    /// - `Saved`: sends `textDocument/didSave` to every language server
    ///   running for the buffer's worktree.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exit; the
    /// value itself carries no meaning.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Only forward operations when the project is shared.
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                // Diff against the most recent snapshot the server has seen.
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // Ranges are expressed in the *old* document, so the
                        // end is derived from the old extent; the replacement
                        // text comes from the new snapshot.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // All servers for the worktree are notified, not just the
                // one associated with this buffer's language.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1255
1256 fn language_servers_for_worktree(
1257 &self,
1258 worktree_id: WorktreeId,
1259 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1260 self.language_servers.iter().filter_map(
1261 move |((language_server_worktree_id, _), server)| {
1262 if *language_server_worktree_id == worktree_id {
1263 Some(server)
1264 } else {
1265 None
1266 }
1267 },
1268 )
1269 }
1270
1271 fn assign_language_to_buffer(
1272 &mut self,
1273 buffer: &ModelHandle<Buffer>,
1274 cx: &mut ModelContext<Self>,
1275 ) -> Option<()> {
1276 // If the buffer has a language, set it and start the language server if we haven't already.
1277 let full_path = buffer.read(cx).file()?.full_path(cx);
1278 let language = self.languages.select_language(&full_path)?;
1279 buffer.update(cx, |buffer, cx| {
1280 buffer.set_language(Some(language.clone()), cx);
1281 });
1282
1283 let file = File::from_dyn(buffer.read(cx).file())?;
1284 let worktree = file.worktree.read(cx).as_local()?;
1285 let worktree_id = worktree.id();
1286 let worktree_abs_path = worktree.abs_path().clone();
1287 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1288
1289 None
1290 }
1291
    /// Starts a language server for `language` rooted at `worktree_path`,
    /// at most once per `(worktree, server name)` key. After the server
    /// initializes, this wires up its notifications/requests, records its
    /// status, pushes the current configuration, announces it to
    /// collaborators, and replays `didOpen` for already-open buffers in the
    /// worktree that match the language.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            return;
        };
        let key = (worktree_id, adapter.name());
        // `entry().or_insert_with` makes this idempotent: a server that is
        // already starting or running is not started a second time.
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let language_server = language_server
                        .initialize(adapter.initialization_options())
                        .await
                        .log_err()?;
                    let this = this.upgrade(&cx)?;
                    let disk_based_diagnostics_progress_token =
                        adapter.disk_based_diagnostics_progress_token();

                    // Route published diagnostics into the project.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_diagnostics_published(
                                            server_id,
                                            params,
                                            &adapter,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    // Serve `workspace/configuration` requests from the
                    // project's stored language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params, _| {
                                let settings = settings.lock().clone();
                                async move {
                                    Ok(params
                                        .items
                                        .into_iter()
                                        .map(|item| {
                                            if let Some(section) = &item.section {
                                                settings
                                                    .get(section)
                                                    .cloned()
                                                    .unwrap_or(serde_json::Value::Null)
                                            } else {
                                                settings.clone()
                                            }
                                        })
                                        .collect())
                                }
                            }
                        })
                        .detach();

                    // Apply server-initiated workspace edits.
                    language_server
                        .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                            let this = this.downgrade();
                            let adapter = adapter.clone();
                            let language_server = language_server.clone();
                            move |params, cx| {
                                Self::on_lsp_workspace_edit(
                                    this,
                                    params,
                                    server_id,
                                    adapter.clone(),
                                    language_server.clone(),
                                    cx,
                                )
                            }
                        })
                        .detach();

                    // Surface `$/progress` notifications.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>({
                            let this = this.downgrade();
                            move |params, mut cx| {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_progress(
                                            params,
                                            server_id,
                                            disk_based_diagnostics_progress_token,
                                            cx,
                                        );
                                    });
                                }
                            }
                        })
                        .detach();

                    this.update(&mut cx, |this, cx| {
                        // Record the running server and its status, then
                        // push the current configuration.
                        this.language_servers
                            .insert(key.clone(), (adapter, language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                // NOTE(review): the `?` here and on the
                                // failed notify below abort this whole
                                // closure, skipping the remaining buffers —
                                // confirm this early exit is intended.
                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1503
1504 pub fn restart_language_servers_for_buffers(
1505 &mut self,
1506 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1507 cx: &mut ModelContext<Self>,
1508 ) -> Option<()> {
1509 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1510 .into_iter()
1511 .filter_map(|buffer| {
1512 let file = File::from_dyn(buffer.read(cx).file())?;
1513 let worktree = file.worktree.read(cx).as_local()?;
1514 let worktree_id = worktree.id();
1515 let worktree_abs_path = worktree.abs_path().clone();
1516 let full_path = file.full_path(cx);
1517 Some((worktree_id, worktree_abs_path, full_path))
1518 })
1519 .collect();
1520 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1521 let language = self.languages.select_language(&full_path)?;
1522 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1523 }
1524
1525 None
1526 }
1527
1528 fn restart_language_server(
1529 &mut self,
1530 worktree_id: WorktreeId,
1531 worktree_path: Arc<Path>,
1532 language: Arc<Language>,
1533 cx: &mut ModelContext<Self>,
1534 ) {
1535 let adapter = if let Some(adapter) = language.lsp_adapter() {
1536 adapter
1537 } else {
1538 return;
1539 };
1540 let key = (worktree_id, adapter.name());
1541 let server_to_shutdown = self.language_servers.remove(&key);
1542 self.started_language_servers.remove(&key);
1543 server_to_shutdown
1544 .as_ref()
1545 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1546 cx.spawn_weak(|this, mut cx| async move {
1547 if let Some(this) = this.upgrade(&cx) {
1548 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1549 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1550 shutdown_task.await;
1551 }
1552 }
1553
1554 this.update(&mut cx, |this, cx| {
1555 this.start_language_server(worktree_id, worktree_path, language, cx);
1556 });
1557 }
1558 })
1559 .detach();
1560 }
1561
1562 fn on_lsp_diagnostics_published(
1563 &mut self,
1564 server_id: usize,
1565 mut params: lsp::PublishDiagnosticsParams,
1566 adapter: &Arc<dyn LspAdapter>,
1567 disk_based_diagnostics_progress_token: Option<&str>,
1568 cx: &mut ModelContext<Self>,
1569 ) {
1570 adapter.process_diagnostics(&mut params);
1571 if disk_based_diagnostics_progress_token.is_none() {
1572 self.disk_based_diagnostics_started(cx);
1573 self.broadcast_language_server_update(
1574 server_id,
1575 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1576 proto::LspDiskBasedDiagnosticsUpdating {},
1577 ),
1578 );
1579 }
1580 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1581 .log_err();
1582 if disk_based_diagnostics_progress_token.is_none() {
1583 self.disk_based_diagnostics_finished(cx);
1584 self.broadcast_language_server_update(
1585 server_id,
1586 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1587 proto::LspDiskBasedDiagnosticsUpdated {},
1588 ),
1589 );
1590 }
1591 }
1592
    /// Routes `$/progress` notifications from a language server.
    ///
    /// Progress carrying the adapter's disk-based-diagnostics token drives
    /// the `pending_diagnostic_updates` counter, emitting started/finished
    /// events plus collaborator broadcasts only at the 0↔1 transitions so
    /// nested cycles collapse into one. Any other string token is surfaced
    /// as generic work start/progress/end status. Numeric tokens are
    /// logged and ignored.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };

        match progress.value {
            lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                lsp::WorkDoneProgress::Begin(_) => {
                    let language_server_status =
                        if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                            status
                        } else {
                            return;
                        };

                    if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                        language_server_status.pending_diagnostic_updates += 1;
                        // Only announce on the first overlapping update.
                        if language_server_status.pending_diagnostic_updates == 1 {
                            self.disk_based_diagnostics_started(cx);
                            self.broadcast_language_server_update(
                                server_id,
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                    proto::LspDiskBasedDiagnosticsUpdating {},
                                ),
                            );
                        }
                    } else {
                        self.on_lsp_work_start(server_id, token.clone(), cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkStart(
                                proto::LspWorkStart { token },
                            ),
                        );
                    }
                }
                lsp::WorkDoneProgress::Report(report) => {
                    // Report events for the diagnostics token carry no
                    // extra information; only generic work is updated.
                    if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                        self.on_lsp_work_progress(
                            server_id,
                            token.clone(),
                            LanguageServerProgress {
                                message: report.message.clone(),
                                percentage: report.percentage.map(|p| p as usize),
                                last_update_at: Instant::now(),
                            },
                            cx,
                        );
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkProgress(
                                proto::LspWorkProgress {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        );
                    }
                }
                lsp::WorkDoneProgress::End(_) => {
                    if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                        let language_server_status = if let Some(status) =
                            self.language_server_statuses.get_mut(&server_id)
                        {
                            status
                        } else {
                            return;
                        };

                        // Announce only when the last overlapping update
                        // finishes.
                        language_server_status.pending_diagnostic_updates -= 1;
                        if language_server_status.pending_diagnostic_updates == 0 {
                            self.disk_based_diagnostics_finished(cx);
                            self.broadcast_language_server_update(
                                server_id,
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                    proto::LspDiskBasedDiagnosticsUpdated {},
                                ),
                            );
                        }
                    } else {
                        self.on_lsp_work_end(server_id, token.clone(), cx);
                        self.broadcast_language_server_update(
                            server_id,
                            proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                                token,
                            }),
                        );
                    }
                }
            },
        }
    }
1696
1697 fn on_lsp_work_start(
1698 &mut self,
1699 language_server_id: usize,
1700 token: String,
1701 cx: &mut ModelContext<Self>,
1702 ) {
1703 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1704 status.pending_work.insert(
1705 token,
1706 LanguageServerProgress {
1707 message: None,
1708 percentage: None,
1709 last_update_at: Instant::now(),
1710 },
1711 );
1712 cx.notify();
1713 }
1714 }
1715
1716 fn on_lsp_work_progress(
1717 &mut self,
1718 language_server_id: usize,
1719 token: String,
1720 progress: LanguageServerProgress,
1721 cx: &mut ModelContext<Self>,
1722 ) {
1723 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1724 status.pending_work.insert(token, progress);
1725 cx.notify();
1726 }
1727 }
1728
1729 fn on_lsp_work_end(
1730 &mut self,
1731 language_server_id: usize,
1732 token: String,
1733 cx: &mut ModelContext<Self>,
1734 ) {
1735 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1736 status.pending_work.remove(&token);
1737 cx.notify();
1738 }
1739 }
1740
1741 async fn on_lsp_workspace_edit(
1742 this: WeakModelHandle<Self>,
1743 params: lsp::ApplyWorkspaceEditParams,
1744 server_id: usize,
1745 adapter: Arc<dyn LspAdapter>,
1746 language_server: Arc<LanguageServer>,
1747 mut cx: AsyncAppContext,
1748 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1749 let this = this
1750 .upgrade(&cx)
1751 .ok_or_else(|| anyhow!("project project closed"))?;
1752 let transaction = Self::deserialize_workspace_edit(
1753 this.clone(),
1754 params.edit,
1755 true,
1756 adapter.clone(),
1757 language_server.clone(),
1758 &mut cx,
1759 )
1760 .await
1761 .log_err();
1762 this.update(&mut cx, |this, _| {
1763 if let Some(transaction) = transaction {
1764 this.last_workspace_edits_by_language_server
1765 .insert(server_id, transaction);
1766 }
1767 });
1768 Ok(lsp::ApplyWorkspaceEditResponse {
1769 applied: true,
1770 failed_change: None,
1771 failure_reason: None,
1772 })
1773 }
1774
1775 fn broadcast_language_server_update(
1776 &self,
1777 language_server_id: usize,
1778 event: proto::update_language_server::Variant,
1779 ) {
1780 if let Some(project_id) = self.remote_id() {
1781 self.client
1782 .send(proto::UpdateLanguageServer {
1783 project_id,
1784 language_server_id: language_server_id as u64,
1785 variant: Some(event),
1786 })
1787 .log_err();
1788 }
1789 }
1790
1791 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1792 for (_, server) in self.language_servers.values() {
1793 server
1794 .notify::<lsp::notification::DidChangeConfiguration>(
1795 lsp::DidChangeConfigurationParams {
1796 settings: settings.clone(),
1797 },
1798 )
1799 .ok();
1800 }
1801 *self.language_server_settings.lock() = settings;
1802 }
1803
1804 pub fn language_server_statuses(
1805 &self,
1806 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1807 self.language_server_statuses.values()
1808 }
1809
1810 pub fn update_diagnostics(
1811 &mut self,
1812 params: lsp::PublishDiagnosticsParams,
1813 disk_based_sources: &[&str],
1814 cx: &mut ModelContext<Self>,
1815 ) -> Result<()> {
1816 let abs_path = params
1817 .uri
1818 .to_file_path()
1819 .map_err(|_| anyhow!("URI is not a file"))?;
1820 let mut next_group_id = 0;
1821 let mut diagnostics = Vec::default();
1822 let mut primary_diagnostic_group_ids = HashMap::default();
1823 let mut sources_by_group_id = HashMap::default();
1824 let mut supporting_diagnostics = HashMap::default();
1825 for diagnostic in ¶ms.diagnostics {
1826 let source = diagnostic.source.as_ref();
1827 let code = diagnostic.code.as_ref().map(|code| match code {
1828 lsp::NumberOrString::Number(code) => code.to_string(),
1829 lsp::NumberOrString::String(code) => code.clone(),
1830 });
1831 let range = range_from_lsp(diagnostic.range);
1832 let is_supporting = diagnostic
1833 .related_information
1834 .as_ref()
1835 .map_or(false, |infos| {
1836 infos.iter().any(|info| {
1837 primary_diagnostic_group_ids.contains_key(&(
1838 source,
1839 code.clone(),
1840 range_from_lsp(info.location.range),
1841 ))
1842 })
1843 });
1844
1845 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1846 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1847 });
1848
1849 if is_supporting {
1850 supporting_diagnostics.insert(
1851 (source, code.clone(), range),
1852 (diagnostic.severity, is_unnecessary),
1853 );
1854 } else {
1855 let group_id = post_inc(&mut next_group_id);
1856 let is_disk_based = source.map_or(false, |source| {
1857 disk_based_sources.contains(&source.as_str())
1858 });
1859
1860 sources_by_group_id.insert(group_id, source);
1861 primary_diagnostic_group_ids
1862 .insert((source, code.clone(), range.clone()), group_id);
1863
1864 diagnostics.push(DiagnosticEntry {
1865 range,
1866 diagnostic: Diagnostic {
1867 code: code.clone(),
1868 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1869 message: diagnostic.message.clone(),
1870 group_id,
1871 is_primary: true,
1872 is_valid: true,
1873 is_disk_based,
1874 is_unnecessary,
1875 },
1876 });
1877 if let Some(infos) = &diagnostic.related_information {
1878 for info in infos {
1879 if info.location.uri == params.uri && !info.message.is_empty() {
1880 let range = range_from_lsp(info.location.range);
1881 diagnostics.push(DiagnosticEntry {
1882 range,
1883 diagnostic: Diagnostic {
1884 code: code.clone(),
1885 severity: DiagnosticSeverity::INFORMATION,
1886 message: info.message.clone(),
1887 group_id,
1888 is_primary: false,
1889 is_valid: true,
1890 is_disk_based,
1891 is_unnecessary: false,
1892 },
1893 });
1894 }
1895 }
1896 }
1897 }
1898 }
1899
1900 for entry in &mut diagnostics {
1901 let diagnostic = &mut entry.diagnostic;
1902 if !diagnostic.is_primary {
1903 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1904 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1905 source,
1906 diagnostic.code.clone(),
1907 entry.range.clone(),
1908 )) {
1909 if let Some(severity) = severity {
1910 diagnostic.severity = severity;
1911 }
1912 diagnostic.is_unnecessary = is_unnecessary;
1913 }
1914 }
1915 }
1916
1917 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1918 Ok(())
1919 }
1920
    /// Applies freshly produced diagnostics for `abs_path`: updates the
    /// first matching open buffer, stores the diagnostics on the owning
    /// local worktree, and emits `Event::DiagnosticsUpdated`.
    ///
    /// Diagnostics whose worktree is not visible are silently dropped.
    /// Errors if no worktree contains `abs_path` or the worktree is not
    /// local.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): buffers are matched by relative path only, not by
        // worktree id — a buffer with the same relative path in a different
        // worktree could match here; confirm this is intended. Only the
        // first match is updated.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1961
    /// Replaces `buffer`'s diagnostic set with `diagnostics`, translating
    /// ranges from the snapshot that corresponds to the LSP `version` into
    /// valid positions in that snapshot.
    ///
    /// Diagnostics are sorted (by start ascending, end descending, then a
    /// stable tiebreak), disk-based diagnostics are shifted through any
    /// unsaved edits, all ranges are clipped to the snapshot, and empty
    /// ranges are widened to a single character so they stay visible.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tiebreak for entries with identical ranges: primaries first, then
        // non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                // Try widening to the right first; if clipping collapses it
                // again (e.g. at end of line), widen to the left instead.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2030
    /// Formats the given buffers, returning a `ProjectTransaction`
    /// containing the edits that were applied.
    ///
    /// Remote buffers are formatted in one `FormatBuffers` RPC to the host.
    /// Local buffers are formatted through their language server, using
    /// whole-document formatting when the server advertises it and falling
    /// back to range-formatting over the entire buffer; buffers whose
    /// server supports neither are skipped. When `push_to_history` is
    /// false, the resulting transactions are removed from the buffers'
    /// undo history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    // Local buffers with no server are silently skipped.
                    if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // NOTE(review): a single file-less buffer aborts formatting
                // of ALL buffers with an empty transaction — confirm this
                // early return is intended.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Format remote buffers first, via a single RPC to the host.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Prefer whole-document formatting; fall back to formatting
                // a range that spans the entire buffer.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: lsp::FormattingOptions {
                                tab_size: 4,
                                insert_spaces: true,
                                insert_final_newline: Some(true),
                                ..Default::default()
                            },
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end =
                        buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: lsp::FormattingOptions {
                                    tab_size: 4,
                                    insert_spaces: true,
                                    insert_final_newline: Some(true),
                                    ..Default::default()
                                },
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into one transaction.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2154
2155 pub fn definition<T: ToPointUtf16>(
2156 &self,
2157 buffer: &ModelHandle<Buffer>,
2158 position: T,
2159 cx: &mut ModelContext<Self>,
2160 ) -> Task<Result<Vec<Location>>> {
2161 let position = position.to_point_utf16(buffer.read(cx));
2162 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2163 }
2164
2165 pub fn references<T: ToPointUtf16>(
2166 &self,
2167 buffer: &ModelHandle<Buffer>,
2168 position: T,
2169 cx: &mut ModelContext<Self>,
2170 ) -> Task<Result<Vec<Location>>> {
2171 let position = position.to_point_utf16(buffer.read(cx));
2172 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2173 }
2174
2175 pub fn document_highlights<T: ToPointUtf16>(
2176 &self,
2177 buffer: &ModelHandle<Buffer>,
2178 position: T,
2179 cx: &mut ModelContext<Self>,
2180 ) -> Task<Result<Vec<DocumentHighlight>>> {
2181 let position = position.to_point_utf16(buffer.read(cx));
2182
2183 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2184 }
2185
    /// Queries workspace symbols matching `query`.
    ///
    /// For local projects this fans out a `workspace/symbol` request to every
    /// distinct language server and merges the results; for remote projects it
    /// forwards the query to the host over RPC. Returns an empty list when the
    /// project is neither local nor connected.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers by pointer identity: the same server
            // instance can be registered under several worktree ids, and we
            // only want to query each one once.
            let mut language_servers = HashMap::default();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                if let Some(worktree) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            lsp_adapter.clone(),
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                        ));
                }
            }

            // Kick off all requests before awaiting so they run concurrently.
            let mut requests = Vec::new();
            for (_, language_server, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // NOTE: relies on HashMap iteration order being the
                        // same across the `values()` loop above and
                        // `into_values()` here, since the map isn't mutated
                        // in between — that's what keeps responses zipped to
                        // the right server.
                        for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer resolving the symbol into one of
                                    // our own worktrees; otherwise keep the
                                    // server's worktree and a relativized path.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = this
                                        .languages
                                        .select_language(&path)
                                        .and_then(|language| {
                                            language
                                                .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        })
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    // Signature authenticates the (worktree, path)
                                    // pair when the symbol is later opened.
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_server_name: adapter.name(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and skipped.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2293
2294 pub fn open_buffer_for_symbol(
2295 &mut self,
2296 symbol: &Symbol,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<ModelHandle<Buffer>>> {
2299 if self.is_local() {
2300 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2301 symbol.source_worktree_id,
2302 symbol.language_server_name.clone(),
2303 )) {
2304 server.clone()
2305 } else {
2306 return Task::ready(Err(anyhow!(
2307 "language server for worktree and language not found"
2308 )));
2309 };
2310
2311 let worktree_abs_path = if let Some(worktree_abs_path) = self
2312 .worktree_for_id(symbol.worktree_id, cx)
2313 .and_then(|worktree| worktree.read(cx).as_local())
2314 .map(|local_worktree| local_worktree.abs_path())
2315 {
2316 worktree_abs_path
2317 } else {
2318 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2319 };
2320 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2321 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2322 uri
2323 } else {
2324 return Task::ready(Err(anyhow!("invalid symbol path")));
2325 };
2326
2327 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2328 } else if let Some(project_id) = self.remote_id() {
2329 let request = self.client.request(proto::OpenBufferForSymbol {
2330 project_id,
2331 symbol: Some(serialize_symbol(symbol)),
2332 });
2333 cx.spawn(|this, mut cx| async move {
2334 let response = request.await?;
2335 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2336 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2337 .await
2338 })
2339 } else {
2340 Task::ready(Err(anyhow!("project does not have a remote id")))
2341 }
2342 }
2343
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Locally this issues a `textDocument/completion` request to the buffer's
    /// language server and converts the LSP items into [`Completion`]s anchored
    /// in the buffer; remotely it forwards the request to the host. Returns an
    /// empty list for buffers without a file or language server.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers without a backing file can't have completions.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor after the position so the anchor stays put as text is typed
        // before it on the remote side.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local worktrees always have a local file, so the path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP response may be a flat array or a (possibly
                // incomplete) list; either way we only use the items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // Determine which range of the buffer the
                            // completion replaces, and with what text.
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                None => {
                                    // No explicit edit: fall back to replacing
                                    // the word prefix before the cursor with
                                    // the completion's label.
                                    let clipped_position =
                                        this.clip_point_utf16(position, Bias::Left);
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        this.common_prefix_at(
                                            clipped_position,
                                            &lsp_completion.label,
                                        ),
                                        lsp_completion.label.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range no longer
                            // fits the buffer (it may have changed since the
                            // request was sent).
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                log::info!("completion out of expected range");
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has caught up to the version the
                // host computed the completions against, so anchors resolve.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2484
    /// Resolves a completion with the language server and applies any
    /// `additionalTextEdits` it carries (e.g. auto-imports) to the buffer.
    ///
    /// Returns the resulting transaction, or `None` if the completion had no
    /// additional edits. When `push_to_history` is false, the transaction is
    /// removed from the buffer's undo history after being captured.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Ask the server to fill in lazily-computed fields, including
                // the additional text edits.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all edits into a single fresh transaction so
                        // they can be undone (or forgotten) as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // All edits were no-ops.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive on this replica
                    // before exposing the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2566
2567 pub fn code_actions<T: Clone + ToOffset>(
2568 &self,
2569 buffer_handle: &ModelHandle<Buffer>,
2570 range: Range<T>,
2571 cx: &mut ModelContext<Self>,
2572 ) -> Task<Result<Vec<CodeAction>>> {
2573 let buffer_handle = buffer_handle.clone();
2574 let buffer = buffer_handle.read(cx);
2575 let snapshot = buffer.snapshot();
2576 let relevant_diagnostics = snapshot
2577 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2578 .map(|entry| entry.to_lsp_diagnostic_stub())
2579 .collect();
2580 let buffer_id = buffer.remote_id();
2581 let worktree;
2582 let buffer_abs_path;
2583 if let Some(file) = File::from_dyn(buffer.file()) {
2584 worktree = file.worktree.clone();
2585 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2586 } else {
2587 return Task::ready(Ok(Default::default()));
2588 };
2589 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2590
2591 if worktree.read(cx).as_local().is_some() {
2592 let buffer_abs_path = buffer_abs_path.unwrap();
2593 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2594 {
2595 server.clone()
2596 } else {
2597 return Task::ready(Ok(Default::default()));
2598 };
2599
2600 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2601 cx.foreground().spawn(async move {
2602 if !lang_server.capabilities().code_action_provider.is_some() {
2603 return Ok(Default::default());
2604 }
2605
2606 Ok(lang_server
2607 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2608 text_document: lsp::TextDocumentIdentifier::new(
2609 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2610 ),
2611 range: lsp_range,
2612 work_done_progress_params: Default::default(),
2613 partial_result_params: Default::default(),
2614 context: lsp::CodeActionContext {
2615 diagnostics: relevant_diagnostics,
2616 only: Some(vec![
2617 lsp::CodeActionKind::QUICKFIX,
2618 lsp::CodeActionKind::REFACTOR,
2619 lsp::CodeActionKind::REFACTOR_EXTRACT,
2620 lsp::CodeActionKind::SOURCE,
2621 ]),
2622 },
2623 })
2624 .await?
2625 .unwrap_or_default()
2626 .into_iter()
2627 .filter_map(|entry| {
2628 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2629 Some(CodeAction {
2630 range: range.clone(),
2631 lsp_action,
2632 })
2633 } else {
2634 None
2635 }
2636 })
2637 .collect())
2638 })
2639 } else if let Some(project_id) = self.remote_id() {
2640 let rpc = self.client.clone();
2641 let version = buffer.version();
2642 cx.spawn_weak(|_, mut cx| async move {
2643 let response = rpc
2644 .request(proto::GetCodeActions {
2645 project_id,
2646 buffer_id,
2647 start: Some(language::proto::serialize_anchor(&range.start)),
2648 end: Some(language::proto::serialize_anchor(&range.end)),
2649 version: serialize_version(&version),
2650 })
2651 .await?;
2652
2653 buffer_handle
2654 .update(&mut cx, |buffer, _| {
2655 buffer.wait_for_version(deserialize_version(response.version))
2656 })
2657 .await;
2658
2659 response
2660 .actions
2661 .into_iter()
2662 .map(language::proto::deserialize_code_action)
2663 .collect()
2664 })
2665 } else {
2666 Task::ready(Ok(Default::default()))
2667 }
2668 }
2669
    /// Applies a previously fetched [`CodeAction`] to the project.
    ///
    /// Locally, the action is first re-resolved with the language server
    /// (either via `codeAction/resolve` when the action carries resolve data,
    /// or by re-requesting actions and matching by title), then its workspace
    /// edit is applied, or its command executed. Remotely, the action is sent
    /// to the host and the resulting transaction deserialized.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh the range
                    // embedded in its resolve data (the buffer may have
                    // changed since the action was fetched), then resolve.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for the range
                    // and find the matching one by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any stale record before executing, so the edits
                    // collected afterwards belong to this command alone.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    // Commands apply edits via server->client requests; pick
                    // up whatever this server recorded while executing.
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2765
2766 async fn deserialize_workspace_edit(
2767 this: ModelHandle<Self>,
2768 edit: lsp::WorkspaceEdit,
2769 push_to_history: bool,
2770 lsp_adapter: Arc<dyn LspAdapter>,
2771 language_server: Arc<LanguageServer>,
2772 cx: &mut AsyncAppContext,
2773 ) -> Result<ProjectTransaction> {
2774 let fs = this.read_with(cx, |this, _| this.fs.clone());
2775 let mut operations = Vec::new();
2776 if let Some(document_changes) = edit.document_changes {
2777 match document_changes {
2778 lsp::DocumentChanges::Edits(edits) => {
2779 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2780 }
2781 lsp::DocumentChanges::Operations(ops) => operations = ops,
2782 }
2783 } else if let Some(changes) = edit.changes {
2784 operations.extend(changes.into_iter().map(|(uri, edits)| {
2785 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2786 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2787 uri,
2788 version: None,
2789 },
2790 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2791 })
2792 }));
2793 }
2794
2795 let mut project_transaction = ProjectTransaction::default();
2796 for operation in operations {
2797 match operation {
2798 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2799 let abs_path = op
2800 .uri
2801 .to_file_path()
2802 .map_err(|_| anyhow!("can't convert URI to path"))?;
2803
2804 if let Some(parent_path) = abs_path.parent() {
2805 fs.create_dir(parent_path).await?;
2806 }
2807 if abs_path.ends_with("/") {
2808 fs.create_dir(&abs_path).await?;
2809 } else {
2810 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2811 .await?;
2812 }
2813 }
2814 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2815 let source_abs_path = op
2816 .old_uri
2817 .to_file_path()
2818 .map_err(|_| anyhow!("can't convert URI to path"))?;
2819 let target_abs_path = op
2820 .new_uri
2821 .to_file_path()
2822 .map_err(|_| anyhow!("can't convert URI to path"))?;
2823 fs.rename(
2824 &source_abs_path,
2825 &target_abs_path,
2826 op.options.map(Into::into).unwrap_or_default(),
2827 )
2828 .await?;
2829 }
2830 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2831 let abs_path = op
2832 .uri
2833 .to_file_path()
2834 .map_err(|_| anyhow!("can't convert URI to path"))?;
2835 let options = op.options.map(Into::into).unwrap_or_default();
2836 if abs_path.ends_with("/") {
2837 fs.remove_dir(&abs_path, options).await?;
2838 } else {
2839 fs.remove_file(&abs_path, options).await?;
2840 }
2841 }
2842 lsp::DocumentChangeOperation::Edit(op) => {
2843 let buffer_to_edit = this
2844 .update(cx, |this, cx| {
2845 this.open_local_buffer_via_lsp(
2846 op.text_document.uri,
2847 lsp_adapter.clone(),
2848 language_server.clone(),
2849 cx,
2850 )
2851 })
2852 .await?;
2853
2854 let edits = this
2855 .update(cx, |this, cx| {
2856 let edits = op.edits.into_iter().map(|edit| match edit {
2857 lsp::OneOf::Left(edit) => edit,
2858 lsp::OneOf::Right(edit) => edit.text_edit,
2859 });
2860 this.edits_from_lsp(
2861 &buffer_to_edit,
2862 edits,
2863 op.text_document.version,
2864 cx,
2865 )
2866 })
2867 .await?;
2868
2869 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2870 buffer.finalize_last_transaction();
2871 buffer.start_transaction();
2872 for (range, text) in edits {
2873 buffer.edit([range], text, cx);
2874 }
2875 let transaction = if buffer.end_transaction(cx).is_some() {
2876 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2877 if !push_to_history {
2878 buffer.forget_transaction(transaction.id);
2879 }
2880 Some(transaction)
2881 } else {
2882 None
2883 };
2884
2885 transaction
2886 });
2887 if let Some(transaction) = transaction {
2888 project_transaction.0.insert(buffer_to_edit, transaction);
2889 }
2890 }
2891 }
2892 }
2893
2894 Ok(project_transaction)
2895 }
2896
2897 pub fn prepare_rename<T: ToPointUtf16>(
2898 &self,
2899 buffer: ModelHandle<Buffer>,
2900 position: T,
2901 cx: &mut ModelContext<Self>,
2902 ) -> Task<Result<Option<Range<Anchor>>>> {
2903 let position = position.to_point_utf16(buffer.read(cx));
2904 self.request_lsp(buffer, PrepareRename { position }, cx)
2905 }
2906
2907 pub fn perform_rename<T: ToPointUtf16>(
2908 &self,
2909 buffer: ModelHandle<Buffer>,
2910 position: T,
2911 new_name: String,
2912 push_to_history: bool,
2913 cx: &mut ModelContext<Self>,
2914 ) -> Task<Result<ProjectTransaction>> {
2915 let position = position.to_point_utf16(buffer.read(cx));
2916 self.request_lsp(
2917 buffer,
2918 PerformRename {
2919 position,
2920 new_name,
2921 push_to_history,
2922 },
2923 cx,
2924 )
2925 }
2926
    /// Searches the project for `query`, returning matching ranges grouped by
    /// buffer.
    ///
    /// For local projects this runs a three-stage pipeline:
    /// 1. background workers scan the files of all visible worktrees for
    ///    candidate paths whose contents may match,
    /// 2. candidate paths are opened as buffers (already-open buffers are
    ///    searched regardless),
    /// 3. background workers search each buffer snapshot and collect anchored
    ///    match ranges.
    /// For remote projects the query is forwarded to the host.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan files on disk. The visible files of all snapshots
            // are treated as one contiguous index range, split evenly across
            // workers.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        // Reused across files to avoid
                                        // per-entry allocations.
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // This worker's slice overlaps
                                                // this snapshot; scan only the
                                                // overlapping sub-range.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver gone: search
                                                    // was abandoned.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: turn matching paths into buffer snapshots. Buffers that
            // are already open are searched up front and tracked in a set so
            // they aren't searched twice when their path also matches on disk.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // `insert` returning true means this buffer wasn't
                            // already searched as an open buffer.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search buffer snapshots in parallel; each worker keeps
            // its own result map, merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3129
    /// Dispatches a typed LSP command for a buffer.
    ///
    /// For local projects the command is converted to an LSP request and sent
    /// to the buffer's language server (after checking its capabilities); for
    /// remote projects it is serialized and forwarded to the host. Returns a
    /// default response when neither path applies (no file, no server, or a
    /// capability the server lacks).
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Bail out gracefully if the server doesn't support this
                    // kind of request.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3172
3173 pub fn find_or_create_local_worktree(
3174 &mut self,
3175 abs_path: impl AsRef<Path>,
3176 visible: bool,
3177 cx: &mut ModelContext<Self>,
3178 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3179 let abs_path = abs_path.as_ref();
3180 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3181 Task::ready(Ok((tree.clone(), relative_path.into())))
3182 } else {
3183 let worktree = self.create_local_worktree(abs_path, visible, cx);
3184 cx.foreground()
3185 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3186 }
3187 }
3188
3189 pub fn find_local_worktree(
3190 &self,
3191 abs_path: &Path,
3192 cx: &AppContext,
3193 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3194 for tree in self.worktrees(cx) {
3195 if let Some(relative_path) = tree
3196 .read(cx)
3197 .as_local()
3198 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3199 {
3200 return Some((tree.clone(), relative_path.into()));
3201 }
3202 }
3203 None
3204 }
3205
3206 pub fn is_shared(&self) -> bool {
3207 match &self.client_state {
3208 ProjectClientState::Local { is_shared, .. } => *is_shared,
3209 ProjectClientState::Remote { .. } => false,
3210 }
3211 }
3212
    /// Creates a local worktree rooted at `abs_path`, deduplicating concurrent
    /// requests for the same path via `loading_local_worktrees`: callers that
    /// race on the same path all await one shared load.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        // Reuse an in-flight load for this path, or start a new one and stash
        // its shared future so later callers can join it.
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Remove the in-flight entry before propagating any
                        // error so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        // If connected to the server, announce the new
                        // worktree — sharing it when the project is shared,
                        // otherwise just registering it.
                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // Shared futures require a cloneable error, hence Arc.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        // Unwrap the Arc'd error back into a plain anyhow error for callers.
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3279
3280 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3281 self.worktrees.retain(|worktree| {
3282 worktree
3283 .upgrade(cx)
3284 .map_or(false, |w| w.read(cx).id() != id)
3285 });
3286 cx.notify();
3287 }
3288
    /// Registers a worktree with this project: wires up observation (and, for
    /// local worktrees, buffer reconciliation on change), then stores either a
    /// strong or weak handle depending on visibility/sharing.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Keep open buffers in sync with local filesystem changes.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Shared projects, visible worktrees, and remote worktrees must stay
        // alive as long as the project does; other worktrees are held weakly
        // so they can be released when nothing else references them.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            // Prune the weak handle from our list once the worktree is
            // actually released.
            cx.observe_release(&worktree, |this, _, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        cx.notify();
    }
3317
3318 fn update_local_worktree_buffers(
3319 &mut self,
3320 worktree_handle: ModelHandle<Worktree>,
3321 cx: &mut ModelContext<Self>,
3322 ) {
3323 let snapshot = worktree_handle.read(cx).snapshot();
3324 let mut buffers_to_delete = Vec::new();
3325 for (buffer_id, buffer) in &self.opened_buffers {
3326 if let Some(buffer) = buffer.upgrade(cx) {
3327 buffer.update(cx, |buffer, cx| {
3328 if let Some(old_file) = File::from_dyn(buffer.file()) {
3329 if old_file.worktree != worktree_handle {
3330 return;
3331 }
3332
3333 let new_file = if let Some(entry) = old_file
3334 .entry_id
3335 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3336 {
3337 File {
3338 is_local: true,
3339 entry_id: Some(entry.id),
3340 mtime: entry.mtime,
3341 path: entry.path.clone(),
3342 worktree: worktree_handle.clone(),
3343 }
3344 } else if let Some(entry) =
3345 snapshot.entry_for_path(old_file.path().as_ref())
3346 {
3347 File {
3348 is_local: true,
3349 entry_id: Some(entry.id),
3350 mtime: entry.mtime,
3351 path: entry.path.clone(),
3352 worktree: worktree_handle.clone(),
3353 }
3354 } else {
3355 File {
3356 is_local: true,
3357 entry_id: None,
3358 path: old_file.path().clone(),
3359 mtime: old_file.mtime(),
3360 worktree: worktree_handle.clone(),
3361 }
3362 };
3363
3364 if let Some(project_id) = self.remote_id() {
3365 self.client
3366 .send(proto::UpdateBufferFile {
3367 project_id,
3368 buffer_id: *buffer_id as u64,
3369 file: Some(new_file.to_proto()),
3370 })
3371 .log_err();
3372 }
3373 buffer.file_updated(Box::new(new_file), cx).detach();
3374 }
3375 });
3376 } else {
3377 buffers_to_delete.push(*buffer_id);
3378 }
3379 }
3380
3381 for buffer_id in buffers_to_delete {
3382 self.opened_buffers.remove(&buffer_id);
3383 }
3384 }
3385
3386 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3387 let new_active_entry = entry.and_then(|project_path| {
3388 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3389 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3390 Some(entry.id)
3391 });
3392 if new_active_entry != self.active_entry {
3393 self.active_entry = new_active_entry;
3394 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3395 }
3396 }
3397
    /// Whether at least one language server is currently producing disk-based
    /// diagnostics (e.g. a build-style check).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
3401
3402 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3403 let mut summary = DiagnosticSummary::default();
3404 for (_, path_summary) in self.diagnostic_summaries(cx) {
3405 summary.error_count += path_summary.error_count;
3406 summary.warning_count += path_summary.warning_count;
3407 summary.info_count += path_summary.info_count;
3408 summary.hint_count += path_summary.hint_count;
3409 }
3410 summary
3411 }
3412
3413 pub fn diagnostic_summaries<'a>(
3414 &'a self,
3415 cx: &'a AppContext,
3416 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3417 self.worktrees(cx).flat_map(move |worktree| {
3418 let worktree = worktree.read(cx);
3419 let worktree_id = worktree.id();
3420 worktree
3421 .diagnostic_summaries()
3422 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3423 })
3424 }
3425
3426 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3427 self.language_servers_with_diagnostics_running += 1;
3428 if self.language_servers_with_diagnostics_running == 1 {
3429 cx.emit(Event::DiskBasedDiagnosticsStarted);
3430 }
3431 }
3432
3433 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3434 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3435 self.language_servers_with_diagnostics_running -= 1;
3436 if self.language_servers_with_diagnostics_running == 0 {
3437 cx.emit(Event::DiskBasedDiagnosticsFinished);
3438 }
3439 }
3440
    /// The currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3444
3445 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3446 self.worktree_for_id(path.worktree_id, cx)?
3447 .read(cx)
3448 .entry_for_path(&path.path)
3449 .map(|entry| entry.id)
3450 }
3451
3452 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3453 let worktree = self.worktree_for_entry(entry_id, cx)?;
3454 let worktree = worktree.read(cx);
3455 let worktree_id = worktree.id();
3456 let path = worktree.entry_for_id(entry_id)?.path.clone();
3457 Some(ProjectPath { worktree_id, path })
3458 }
3459
3460 // RPC message handlers
3461
    /// RPC handler: the project is no longer shared; tear down sharing state.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3471
3472 async fn handle_add_collaborator(
3473 this: ModelHandle<Self>,
3474 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3475 _: Arc<Client>,
3476 mut cx: AsyncAppContext,
3477 ) -> Result<()> {
3478 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3479 let collaborator = envelope
3480 .payload
3481 .collaborator
3482 .take()
3483 .ok_or_else(|| anyhow!("empty collaborator"))?;
3484
3485 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3486 this.update(&mut cx, |this, cx| {
3487 this.collaborators
3488 .insert(collaborator.peer_id, collaborator);
3489 cx.notify();
3490 });
3491
3492 Ok(())
3493 }
3494
3495 async fn handle_remove_collaborator(
3496 this: ModelHandle<Self>,
3497 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3498 _: Arc<Client>,
3499 mut cx: AsyncAppContext,
3500 ) -> Result<()> {
3501 this.update(&mut cx, |this, cx| {
3502 let peer_id = PeerId(envelope.payload.peer_id);
3503 let replica_id = this
3504 .collaborators
3505 .remove(&peer_id)
3506 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3507 .replica_id;
3508 for (_, buffer) in &this.opened_buffers {
3509 if let Some(buffer) = buffer.upgrade(cx) {
3510 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3511 }
3512 }
3513 cx.emit(Event::CollaboratorLeft(peer_id));
3514 cx.notify();
3515 Ok(())
3516 })
3517 }
3518
    /// RPC handler: the host registered a new worktree. Creates an (initially
    /// empty) remote worktree and kicks off the task that loads its contents.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Entries and diagnostics start empty; they arrive via
            // subsequent update messages.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
3542
    /// RPC handler: the host removed a worktree; mirror the removal locally.
    async fn handle_unregister_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UnregisterWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.remove_worktree(worktree_id, cx);
            Ok(())
        })
    }
3555
    /// RPC handler: applies a batch of entry updates to the corresponding
    /// remote worktree. Updates for unknown worktrees are silently ignored
    /// (the worktree may already have been unregistered).
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive these messages.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
3573
    /// RPC handler: stores an updated per-path diagnostic summary on the
    /// corresponding remote worktree and notifies listeners.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // Only remote worktrees receive these messages.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
3600
3601 async fn handle_start_language_server(
3602 this: ModelHandle<Self>,
3603 envelope: TypedEnvelope<proto::StartLanguageServer>,
3604 _: Arc<Client>,
3605 mut cx: AsyncAppContext,
3606 ) -> Result<()> {
3607 let server = envelope
3608 .payload
3609 .server
3610 .ok_or_else(|| anyhow!("invalid server"))?;
3611 this.update(&mut cx, |this, cx| {
3612 this.language_server_statuses.insert(
3613 server.id as usize,
3614 LanguageServerStatus {
3615 name: server.name,
3616 pending_work: Default::default(),
3617 pending_diagnostic_updates: 0,
3618 },
3619 );
3620 cx.notify();
3621 });
3622 Ok(())
3623 }
3624
    /// RPC handler: dispatches a language-server progress message to the
    /// matching local bookkeeping (work start/progress/end, and disk-based
    /// diagnostics start/finish).
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamp is taken on receipt, not on the host.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3673
3674 async fn handle_update_buffer(
3675 this: ModelHandle<Self>,
3676 envelope: TypedEnvelope<proto::UpdateBuffer>,
3677 _: Arc<Client>,
3678 mut cx: AsyncAppContext,
3679 ) -> Result<()> {
3680 this.update(&mut cx, |this, cx| {
3681 let payload = envelope.payload.clone();
3682 let buffer_id = payload.buffer_id;
3683 let ops = payload
3684 .operations
3685 .into_iter()
3686 .map(|op| language::proto::deserialize_operation(op))
3687 .collect::<Result<Vec<_>, _>>()?;
3688 match this.opened_buffers.entry(buffer_id) {
3689 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3690 OpenBuffer::Strong(buffer) => {
3691 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3692 }
3693 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3694 OpenBuffer::Weak(_) => {}
3695 },
3696 hash_map::Entry::Vacant(e) => {
3697 e.insert(OpenBuffer::Loading(ops));
3698 }
3699 }
3700 Ok(())
3701 })
3702 }
3703
3704 async fn handle_update_buffer_file(
3705 this: ModelHandle<Self>,
3706 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3707 _: Arc<Client>,
3708 mut cx: AsyncAppContext,
3709 ) -> Result<()> {
3710 this.update(&mut cx, |this, cx| {
3711 let payload = envelope.payload.clone();
3712 let buffer_id = payload.buffer_id;
3713 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3714 let worktree = this
3715 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3716 .ok_or_else(|| anyhow!("no such worktree"))?;
3717 let file = File::from_proto(file, worktree.clone(), cx)?;
3718 let buffer = this
3719 .opened_buffers
3720 .get_mut(&buffer_id)
3721 .and_then(|b| b.upgrade(cx))
3722 .ok_or_else(|| anyhow!("no such buffer"))?;
3723 buffer.update(cx, |buffer, cx| {
3724 buffer.file_updated(Box::new(file), cx).detach();
3725 });
3726 Ok(())
3727 })
3728 }
3729
    /// RPC handler: saves a buffer on behalf of a guest. Waits until the
    /// local replica has caught up to the guest's requested version before
    /// saving, then reports the saved version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            // NOTE(review): `upgrade(..).unwrap()` panics if the buffer was
            // dropped while its map entry remains — confirm entries are
            // pruned before release.
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all edits the guest has seen have been applied.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
3762
    /// RPC handler: formats a set of buffers for a guest and returns the
    /// resulting project transaction, serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .map(|buffer| buffer.upgrade(cx).unwrap())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `false`: don't push the transaction into the host's own history.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
3791
    /// RPC handler: computes completions at a position for a guest, after
    /// waiting for the buffer to reach the guest's version. The version the
    /// completions were computed against is echoed back in the response.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Capture the version actually used, which may be newer than the
        // one the guest requested.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
3826
    /// RPC handler: applies a completion's additional edits (e.g. auto-import)
    /// on behalf of a guest and returns the resulting transaction, if any.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to rebuild the completion label.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `false`: don't push the transaction into the host's history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
3859
    /// RPC handler: computes code actions for a range on behalf of a guest,
    /// after the buffer has caught up to the guest's version. The version the
    /// actions were computed against is echoed back in the response.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Capture the version actually used for the computation.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
3902
    /// RPC handler: applies a code action on behalf of a guest and returns
    /// the resulting project transaction, serialized for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `false`: don't push the transaction into the host's history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
3933
    /// Generic RPC handler for LSP-backed commands: deserializes the request,
    /// runs it against the language server via `request_lsp`, and serializes
    /// the response for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` may await (e.g. to wait for the buffer to catch up).
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Record the version the request was resolved against so the
        // response can reference it.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3974
3975 async fn handle_get_project_symbols(
3976 this: ModelHandle<Self>,
3977 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3978 _: Arc<Client>,
3979 mut cx: AsyncAppContext,
3980 ) -> Result<proto::GetProjectSymbolsResponse> {
3981 let symbols = this
3982 .update(&mut cx, |this, cx| {
3983 this.symbols(&envelope.payload.query, cx)
3984 })
3985 .await?;
3986
3987 Ok(proto::GetProjectSymbolsResponse {
3988 symbols: symbols.iter().map(serialize_symbol).collect(),
3989 })
3990 }
3991
    /// RPC handler: runs a project-wide search for a guest and returns the
    /// matches as locations, with each containing buffer serialized for that
    /// peer (full state on first mention, id thereafter).
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4021
    /// RPC handler: opens the buffer containing a symbol for a guest. The
    /// symbol's signature (a keyed hash) is re-verified first so a guest
    /// cannot open arbitrary paths by forging symbols.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            // Reject symbols whose signature doesn't match ours — they were
            // not produced by this project instance.
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4052
4053 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4054 let mut hasher = Sha256::new();
4055 hasher.update(worktree_id.to_proto().to_be_bytes());
4056 hasher.update(path.to_string_lossy().as_bytes());
4057 hasher.update(self.nonce.to_be_bytes());
4058 hasher.finalize().as_slice().try_into().unwrap()
4059 }
4060
4061 async fn handle_open_buffer_by_id(
4062 this: ModelHandle<Self>,
4063 envelope: TypedEnvelope<proto::OpenBufferById>,
4064 _: Arc<Client>,
4065 mut cx: AsyncAppContext,
4066 ) -> Result<proto::OpenBufferResponse> {
4067 let peer_id = envelope.original_sender_id()?;
4068 let buffer = this
4069 .update(&mut cx, |this, cx| {
4070 this.open_buffer_by_id(envelope.payload.id, cx)
4071 })
4072 .await?;
4073 this.update(&mut cx, |this, cx| {
4074 Ok(proto::OpenBufferResponse {
4075 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4076 })
4077 })
4078 }
4079
4080 async fn handle_open_buffer_by_path(
4081 this: ModelHandle<Self>,
4082 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4083 _: Arc<Client>,
4084 mut cx: AsyncAppContext,
4085 ) -> Result<proto::OpenBufferResponse> {
4086 let peer_id = envelope.original_sender_id()?;
4087 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4088 let open_buffer = this.update(&mut cx, |this, cx| {
4089 this.open_buffer(
4090 ProjectPath {
4091 worktree_id,
4092 path: PathBuf::from(envelope.payload.path).into(),
4093 },
4094 cx,
4095 )
4096 });
4097
4098 let buffer = open_buffer.await?;
4099 this.update(&mut cx, |this, cx| {
4100 Ok(proto::OpenBufferResponse {
4101 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4102 })
4103 })
4104 }
4105
4106 fn serialize_project_transaction_for_peer(
4107 &mut self,
4108 project_transaction: ProjectTransaction,
4109 peer_id: PeerId,
4110 cx: &AppContext,
4111 ) -> proto::ProjectTransaction {
4112 let mut serialized_transaction = proto::ProjectTransaction {
4113 buffers: Default::default(),
4114 transactions: Default::default(),
4115 };
4116 for (buffer, transaction) in project_transaction.0 {
4117 serialized_transaction
4118 .buffers
4119 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4120 serialized_transaction
4121 .transactions
4122 .push(language::proto::serialize_transaction(&transaction));
4123 }
4124 serialized_transaction
4125 }
4126
    /// Reconstructs a project transaction received from a peer: resolves each
    /// serialized buffer, then waits until every referenced edit has arrived
    /// before (optionally) pushing the transactions into undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel vectors.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // Edits referenced by a transaction may not have been applied to
            // our replica yet; wait for them before exposing the transaction.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4160
4161 fn serialize_buffer_for_peer(
4162 &mut self,
4163 buffer: &ModelHandle<Buffer>,
4164 peer_id: PeerId,
4165 cx: &AppContext,
4166 ) -> proto::Buffer {
4167 let buffer_id = buffer.read(cx).remote_id();
4168 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4169 if shared_buffers.insert(buffer_id) {
4170 proto::Buffer {
4171 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4172 }
4173 } else {
4174 proto::Buffer {
4175 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4176 }
4177 }
4178 }
4179
    /// Resolves a serialized buffer from a peer into a local buffer handle.
    /// An id-only message waits (via the `opened_buffer` watch channel) until
    /// the referenced buffer appears locally; a full-state message constructs
    /// and registers a new buffer.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer may not have arrived yet; poll the open
                    // buffers and block on the watch channel between checks.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the buffer's file against a local worktree, if
                    // the serialized state carries one.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting in the Id branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4238
    /// Reconstructs a `Symbol` from its wire representation, re-deriving the
    /// display label from the local language registry.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): transmuting a network-supplied integer into a symbol
        // kind is unsound if the value is out of range for the target type —
        // this should validate the discriminant instead; confirm and fix.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Prefer a language-specific label; fall back to the plain name.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4268
4269 async fn handle_buffer_saved(
4270 this: ModelHandle<Self>,
4271 envelope: TypedEnvelope<proto::BufferSaved>,
4272 _: Arc<Client>,
4273 mut cx: AsyncAppContext,
4274 ) -> Result<()> {
4275 let version = deserialize_version(envelope.payload.version);
4276 let mtime = envelope
4277 .payload
4278 .mtime
4279 .ok_or_else(|| anyhow!("missing mtime"))?
4280 .into();
4281
4282 this.update(&mut cx, |this, cx| {
4283 let buffer = this
4284 .opened_buffers
4285 .get(&envelope.payload.buffer_id)
4286 .and_then(|buffer| buffer.upgrade(cx));
4287 if let Some(buffer) = buffer {
4288 buffer.update(cx, |buffer, cx| {
4289 buffer.did_save(version, mtime, None, cx);
4290 });
4291 }
4292 Ok(())
4293 })
4294 }
4295
4296 async fn handle_buffer_reloaded(
4297 this: ModelHandle<Self>,
4298 envelope: TypedEnvelope<proto::BufferReloaded>,
4299 _: Arc<Client>,
4300 mut cx: AsyncAppContext,
4301 ) -> Result<()> {
4302 let payload = envelope.payload.clone();
4303 let version = deserialize_version(payload.version);
4304 let mtime = payload
4305 .mtime
4306 .ok_or_else(|| anyhow!("missing mtime"))?
4307 .into();
4308 this.update(&mut cx, |this, cx| {
4309 let buffer = this
4310 .opened_buffers
4311 .get(&payload.buffer_id)
4312 .and_then(|buffer| buffer.upgrade(cx));
4313 if let Some(buffer) = buffer {
4314 buffer.update(cx, |buffer, cx| {
4315 buffer.did_reload(version, mtime, cx);
4316 });
4317 }
4318 Ok(())
4319 })
4320 }
4321
4322 pub fn match_paths<'a>(
4323 &self,
4324 query: &'a str,
4325 include_ignored: bool,
4326 smart_case: bool,
4327 max_results: usize,
4328 cancel_flag: &'a AtomicBool,
4329 cx: &AppContext,
4330 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4331 let worktrees = self
4332 .worktrees(cx)
4333 .filter(|worktree| worktree.read(cx).is_visible())
4334 .collect::<Vec<_>>();
4335 let include_root_name = worktrees.len() > 1;
4336 let candidate_sets = worktrees
4337 .into_iter()
4338 .map(|worktree| CandidateSet {
4339 snapshot: worktree.read(cx).snapshot(),
4340 include_ignored,
4341 include_root_name,
4342 })
4343 .collect::<Vec<_>>();
4344
4345 let background = cx.background().clone();
4346 async move {
4347 fuzzy::match_paths(
4348 candidate_sets.as_slice(),
4349 query,
4350 smart_case,
4351 max_results,
4352 cancel_flag,
4353 background,
4354 )
4355 .await
4356 }
4357 }
4358
    /// Converts a batch of LSP `TextEdit`s into anchored `(Range<Anchor>, String)`
    /// edits against the snapshot of `buffer` at `version` (`None` means the
    /// current text). Runs on the background executor.
    ///
    /// Adjacent or newline-separated LSP edits are coalesced, and multiline
    /// replacements are diffed against the old text so anchors in unchanged
    /// regions keep their positions.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        // Re-insert the unchanged newline separating the two edits.
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't correspond to valid
                // positions in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether an unchanged region separates this change
                    // from the previous one; consecutive changes are merged.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4461
4462 fn buffer_snapshot_for_lsp_version(
4463 &mut self,
4464 buffer: &ModelHandle<Buffer>,
4465 version: Option<i32>,
4466 cx: &AppContext,
4467 ) -> Result<TextBufferSnapshot> {
4468 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4469
4470 if let Some(version) = version {
4471 let buffer_id = buffer.read(cx).remote_id();
4472 let snapshots = self
4473 .buffer_snapshots
4474 .get_mut(&buffer_id)
4475 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4476 let mut found_snapshot = None;
4477 snapshots.retain(|(snapshot_version, snapshot)| {
4478 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4479 false
4480 } else {
4481 if *snapshot_version == version {
4482 found_snapshot = Some(snapshot.clone());
4483 }
4484 true
4485 }
4486 });
4487
4488 found_snapshot.ok_or_else(|| {
4489 anyhow!(
4490 "snapshot not found for buffer {} at version {}",
4491 buffer_id,
4492 version
4493 )
4494 })
4495 } else {
4496 Ok((buffer.read(cx)).text_snapshot())
4497 }
4498 }
4499
4500 fn language_server_for_buffer(
4501 &self,
4502 buffer: &Buffer,
4503 cx: &AppContext,
4504 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4505 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4506 let worktree_id = file.worktree_id(cx);
4507 self.language_servers
4508 .get(&(worktree_id, language.lsp_adapter()?.name()))
4509 } else {
4510 None
4511 }
4512 }
4513}
4514
4515impl WorktreeHandle {
4516 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4517 match self {
4518 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4519 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4520 }
4521 }
4522}
4523
4524impl OpenBuffer {
4525 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4526 match self {
4527 OpenBuffer::Strong(handle) => Some(handle.clone()),
4528 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4529 OpenBuffer::Loading(_) => None,
4530 }
4531 }
4532}
4533
/// A set of fuzzy-match candidates drawn from a single worktree snapshot.
struct CandidateSet {
    snapshot: Snapshot,
    /// Whether ignored files are offered as candidates.
    include_ignored: bool,
    /// Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
4539
4540impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4541 type Candidates = CandidateSetIter<'a>;
4542
4543 fn id(&self) -> usize {
4544 self.snapshot.id().to_usize()
4545 }
4546
4547 fn len(&self) -> usize {
4548 if self.include_ignored {
4549 self.snapshot.file_count()
4550 } else {
4551 self.snapshot.visible_file_count()
4552 }
4553 }
4554
4555 fn prefix(&self) -> Arc<str> {
4556 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4557 self.snapshot.root_name().into()
4558 } else if self.include_root_name {
4559 format!("{}/", self.snapshot.root_name()).into()
4560 } else {
4561 "".into()
4562 }
4563 }
4564
4565 fn candidates(&'a self, start: usize) -> Self::Candidates {
4566 CandidateSetIter {
4567 traversal: self.snapshot.files(self.include_ignored, start),
4568 }
4569 }
4570}
4571
/// Iterator over the file entries of a [`CandidateSet`], yielding fuzzy-match
/// candidates for each file.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4575
4576impl<'a> Iterator for CandidateSetIter<'a> {
4577 type Item = PathMatchCandidate<'a>;
4578
4579 fn next(&mut self) -> Option<Self::Item> {
4580 self.traversal.next().map(|entry| {
4581 if let EntryKind::File(char_bag) = entry.kind {
4582 PathMatchCandidate {
4583 path: &entry.path,
4584 char_bag,
4585 }
4586 } else {
4587 unreachable!()
4588 }
4589 })
4590 }
4591}
4592
4593impl Entity for Project {
4594 type Event = Event;
4595
4596 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4597 match &self.client_state {
4598 ProjectClientState::Local { remote_id_rx, .. } => {
4599 if let Some(project_id) = *remote_id_rx.borrow() {
4600 self.client
4601 .send(proto::UnregisterProject { project_id })
4602 .log_err();
4603 }
4604 }
4605 ProjectClientState::Remote { remote_id, .. } => {
4606 self.client
4607 .send(proto::LeaveProject {
4608 project_id: *remote_id,
4609 })
4610 .log_err();
4611 }
4612 }
4613 }
4614
4615 fn app_will_quit(
4616 &mut self,
4617 _: &mut MutableAppContext,
4618 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4619 let shutdown_futures = self
4620 .language_servers
4621 .drain()
4622 .filter_map(|(_, (_, server))| server.shutdown())
4623 .collect::<Vec<_>>();
4624 Some(
4625 async move {
4626 futures::future::join_all(shutdown_futures).await;
4627 }
4628 .boxed(),
4629 )
4630 }
4631}
4632
4633impl Collaborator {
4634 fn from_proto(
4635 message: proto::Collaborator,
4636 user_store: &ModelHandle<UserStore>,
4637 cx: &mut AsyncAppContext,
4638 ) -> impl Future<Output = Result<Self>> {
4639 let user = user_store.update(cx, |user_store, cx| {
4640 user_store.fetch_user(message.user_id, cx)
4641 });
4642
4643 async move {
4644 Ok(Self {
4645 peer_id: PeerId(message.peer_id),
4646 user: user.await?,
4647 replica_id: message.replica_id as ReplicaId,
4648 })
4649 }
4650 }
4651}
4652
4653impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4654 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4655 Self {
4656 worktree_id,
4657 path: path.as_ref().into(),
4658 }
4659 }
4660}
4661
4662impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4663 fn from(options: lsp::CreateFileOptions) -> Self {
4664 Self {
4665 overwrite: options.overwrite.unwrap_or(false),
4666 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4667 }
4668 }
4669}
4670
4671impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4672 fn from(options: lsp::RenameFileOptions) -> Self {
4673 Self {
4674 overwrite: options.overwrite.unwrap_or(false),
4675 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4676 }
4677 }
4678}
4679
4680impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4681 fn from(options: lsp::DeleteFileOptions) -> Self {
4682 Self {
4683 recursive: options.recursive.unwrap_or(false),
4684 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4685 }
4686 }
4687}
4688
/// Converts a [`Symbol`] into its protobuf representation for sending to peers.
/// Inverse of `Project::deserialize_symbol`.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // NOTE(review): transmutes the `lsp::SymbolKind` to its wire integer;
        // relies on both types having identical layout — verify against
        // `deserialize_symbol`, which performs the inverse transmute.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path segments are replaced.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4708
/// Computes the relative path that leads from `base` to `path`.
///
/// Components shared at the front of both paths are skipped; every remaining
/// component of `base` contributes a `..`, and the remainder of `path` is
/// appended. `CurDir` (`.`) components in `base` are passed over.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut origin = base.components();
    let mut relative: Vec<Component> = Vec::new();
    loop {
        match (target.next(), origin.next()) {
            // Both exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended verbatim.
            (Some(t), None) => {
                relative.push(t);
                relative.extend(target.by_ref());
                break;
            }
            // Path exhausted: each leftover base component becomes `..`.
            (None, _) => relative.push(Component::ParentDir),
            // Shared prefix: skip matching components until the first divergence.
            (Some(t), Some(o)) if relative.is_empty() && t == o => {}
            // A `.` in base consumes nothing meaningful; keep the path component.
            (Some(t), Some(o)) if o == Component::CurDir => relative.push(t),
            // Divergence: climb out of the remaining base, then descend into path.
            (Some(t), Some(_)) => {
                relative.push(Component::ParentDir);
                relative.extend(origin.by_ref().map(|_| Component::ParentDir));
                relative.push(t);
                relative.extend(target.by_ref());
                break;
            }
        }
    }
    relative.iter().map(|c| c.as_os_str()).collect()
}
4737
impl Item for Buffer {
    /// The project entry backing this buffer's file, if the buffer belongs to
    /// a worktree and the entry still exists.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
4743
4744#[cfg(test)]
4745mod tests {
4746 use super::{Event, *};
4747 use fs::RealFs;
4748 use futures::{future, StreamExt};
4749 use gpui::test::subscribe;
4750 use language::{
4751 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4752 ToPoint,
4753 };
4754 use lsp::Url;
4755 use serde_json::json;
4756 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4757 use unindent::Unindent as _;
4758 use util::{assert_set_eq, test::temp_tree};
4759 use worktree::WorktreeHandle as _;
4760
    // Verifies that a worktree scanned through symlinks is fully populated and
    // that fuzzy path matching returns the expected files.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the worktree through a symlink to the root, with an additional
        // symlinked directory inside it.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same files (same inodes).
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against the worktree's paths.
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4823
    // Exercises the lifecycle of language servers across multiple languages:
    // startup on first buffer open, capability-based buffer configuration,
    // change/save/close notifications, and server restarts.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers should receive a shutdown request before being replaced.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure both rust documents are reopened in new rust language server without worrying about order
        assert_set_eq!(
            [
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                    version: 1,
                    text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
            ]
        );

        // Ensure json document is reopened in new json language server
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5104
    // Verifies that nested LSP progress notifications with a disk-based
    // diagnostics token produce the expected project events, and that
    // published diagnostics appear on the buffer.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested progress: start/end pairs may interleave; only the outermost
        // pair should produce started/finished events.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is visible on the newly opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
5222
5223 #[gpui::test]
5224 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5225 cx.foreground().forbid_parking();
5226
5227 let mut language = Language::new(
5228 LanguageConfig {
5229 name: "Rust".into(),
5230 path_suffixes: vec!["rs".to_string()],
5231 ..Default::default()
5232 },
5233 Some(tree_sitter_rust::language()),
5234 );
5235 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5236 disk_based_diagnostics_sources: &["disk"],
5237 ..Default::default()
5238 });
5239
5240 let text = "
5241 fn a() { A }
5242 fn b() { BB }
5243 fn c() { CCC }
5244 "
5245 .unindent();
5246
5247 let fs = FakeFs::new(cx.background());
5248 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5249
5250 let project = Project::test(fs, cx);
5251 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5252
5253 let worktree_id = project
5254 .update(cx, |project, cx| {
5255 project.find_or_create_local_worktree("/dir", true, cx)
5256 })
5257 .await
5258 .unwrap()
5259 .0
5260 .read_with(cx, |tree, _| tree.id());
5261
5262 let buffer = project
5263 .update(cx, |project, cx| {
5264 project.open_buffer((worktree_id, "a.rs"), cx)
5265 })
5266 .await
5267 .unwrap();
5268
5269 let mut fake_server = fake_servers.next().await.unwrap();
5270 let open_notification = fake_server
5271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5272 .await;
5273
5274 // Edit the buffer, moving the content down
5275 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5276 let change_notification_1 = fake_server
5277 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5278 .await;
5279 assert!(
5280 change_notification_1.text_document.version > open_notification.text_document.version
5281 );
5282
5283 // Report some diagnostics for the initial version of the buffer
5284 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5285 lsp::PublishDiagnosticsParams {
5286 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5287 version: Some(open_notification.text_document.version),
5288 diagnostics: vec![
5289 lsp::Diagnostic {
5290 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5291 severity: Some(DiagnosticSeverity::ERROR),
5292 message: "undefined variable 'A'".to_string(),
5293 source: Some("disk".to_string()),
5294 ..Default::default()
5295 },
5296 lsp::Diagnostic {
5297 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5298 severity: Some(DiagnosticSeverity::ERROR),
5299 message: "undefined variable 'BB'".to_string(),
5300 source: Some("disk".to_string()),
5301 ..Default::default()
5302 },
5303 lsp::Diagnostic {
5304 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5305 severity: Some(DiagnosticSeverity::ERROR),
5306 source: Some("disk".to_string()),
5307 message: "undefined variable 'CCC'".to_string(),
5308 ..Default::default()
5309 },
5310 ],
5311 },
5312 );
5313
5314 // The diagnostics have moved down since they were created.
5315 buffer.next_notification(cx).await;
5316 buffer.read_with(cx, |buffer, _| {
5317 assert_eq!(
5318 buffer
5319 .snapshot()
5320 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5321 .collect::<Vec<_>>(),
5322 &[
5323 DiagnosticEntry {
5324 range: Point::new(3, 9)..Point::new(3, 11),
5325 diagnostic: Diagnostic {
5326 severity: DiagnosticSeverity::ERROR,
5327 message: "undefined variable 'BB'".to_string(),
5328 is_disk_based: true,
5329 group_id: 1,
5330 is_primary: true,
5331 ..Default::default()
5332 },
5333 },
5334 DiagnosticEntry {
5335 range: Point::new(4, 9)..Point::new(4, 12),
5336 diagnostic: Diagnostic {
5337 severity: DiagnosticSeverity::ERROR,
5338 message: "undefined variable 'CCC'".to_string(),
5339 is_disk_based: true,
5340 group_id: 2,
5341 is_primary: true,
5342 ..Default::default()
5343 }
5344 }
5345 ]
5346 );
5347 assert_eq!(
5348 chunks_with_diagnostics(buffer, 0..buffer.len()),
5349 [
5350 ("\n\nfn a() { ".to_string(), None),
5351 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5352 (" }\nfn b() { ".to_string(), None),
5353 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5354 (" }\nfn c() { ".to_string(), None),
5355 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5356 (" }\n".to_string(), None),
5357 ]
5358 );
5359 assert_eq!(
5360 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5361 [
5362 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5363 (" }\nfn c() { ".to_string(), None),
5364 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5365 ]
5366 );
5367 });
5368
5369 // Ensure overlapping diagnostics are highlighted correctly.
5370 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5371 lsp::PublishDiagnosticsParams {
5372 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5373 version: Some(open_notification.text_document.version),
5374 diagnostics: vec![
5375 lsp::Diagnostic {
5376 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5377 severity: Some(DiagnosticSeverity::ERROR),
5378 message: "undefined variable 'A'".to_string(),
5379 source: Some("disk".to_string()),
5380 ..Default::default()
5381 },
5382 lsp::Diagnostic {
5383 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5384 severity: Some(DiagnosticSeverity::WARNING),
5385 message: "unreachable statement".to_string(),
5386 source: Some("disk".to_string()),
5387 ..Default::default()
5388 },
5389 ],
5390 },
5391 );
5392
5393 buffer.next_notification(cx).await;
5394 buffer.read_with(cx, |buffer, _| {
5395 assert_eq!(
5396 buffer
5397 .snapshot()
5398 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5399 .collect::<Vec<_>>(),
5400 &[
5401 DiagnosticEntry {
5402 range: Point::new(2, 9)..Point::new(2, 12),
5403 diagnostic: Diagnostic {
5404 severity: DiagnosticSeverity::WARNING,
5405 message: "unreachable statement".to_string(),
5406 is_disk_based: true,
5407 group_id: 1,
5408 is_primary: true,
5409 ..Default::default()
5410 }
5411 },
5412 DiagnosticEntry {
5413 range: Point::new(2, 9)..Point::new(2, 10),
5414 diagnostic: Diagnostic {
5415 severity: DiagnosticSeverity::ERROR,
5416 message: "undefined variable 'A'".to_string(),
5417 is_disk_based: true,
5418 group_id: 0,
5419 is_primary: true,
5420 ..Default::default()
5421 },
5422 }
5423 ]
5424 );
5425 assert_eq!(
5426 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5427 [
5428 ("fn a() { ".to_string(), None),
5429 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5430 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5431 ("\n".to_string(), None),
5432 ]
5433 );
5434 assert_eq!(
5435 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5436 [
5437 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5438 ("\n".to_string(), None),
5439 ]
5440 );
5441 });
5442
5443 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5444 // changes since the last save.
5445 buffer.update(cx, |buffer, cx| {
5446 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5447 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5448 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5449 });
5450 let change_notification_2 = fake_server
5451 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5452 .await;
5453 assert!(
5454 change_notification_2.text_document.version
5455 > change_notification_1.text_document.version
5456 );
5457
5458 // Handle out-of-order diagnostics
5459 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5460 lsp::PublishDiagnosticsParams {
5461 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5462 version: Some(change_notification_2.text_document.version),
5463 diagnostics: vec![
5464 lsp::Diagnostic {
5465 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5466 severity: Some(DiagnosticSeverity::ERROR),
5467 message: "undefined variable 'BB'".to_string(),
5468 source: Some("disk".to_string()),
5469 ..Default::default()
5470 },
5471 lsp::Diagnostic {
5472 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5473 severity: Some(DiagnosticSeverity::WARNING),
5474 message: "undefined variable 'A'".to_string(),
5475 source: Some("disk".to_string()),
5476 ..Default::default()
5477 },
5478 ],
5479 },
5480 );
5481
5482 buffer.next_notification(cx).await;
5483 buffer.read_with(cx, |buffer, _| {
5484 assert_eq!(
5485 buffer
5486 .snapshot()
5487 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5488 .collect::<Vec<_>>(),
5489 &[
5490 DiagnosticEntry {
5491 range: Point::new(2, 21)..Point::new(2, 22),
5492 diagnostic: Diagnostic {
5493 severity: DiagnosticSeverity::WARNING,
5494 message: "undefined variable 'A'".to_string(),
5495 is_disk_based: true,
5496 group_id: 1,
5497 is_primary: true,
5498 ..Default::default()
5499 }
5500 },
5501 DiagnosticEntry {
5502 range: Point::new(3, 9)..Point::new(3, 14),
5503 diagnostic: Diagnostic {
5504 severity: DiagnosticSeverity::ERROR,
5505 message: "undefined variable 'BB'".to_string(),
5506 is_disk_based: true,
5507 group_id: 0,
5508 is_primary: true,
5509 ..Default::default()
5510 },
5511 }
5512 ]
5513 );
5514 });
5515 }
5516
5517 #[gpui::test]
5518 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5519 cx.foreground().forbid_parking();
5520
5521 let text = concat!(
5522 "let one = ;\n", //
5523 "let two = \n",
5524 "let three = 3;\n",
5525 );
5526
5527 let fs = FakeFs::new(cx.background());
5528 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5529
5530 let project = Project::test(fs, cx);
5531 let worktree_id = project
5532 .update(cx, |project, cx| {
5533 project.find_or_create_local_worktree("/dir", true, cx)
5534 })
5535 .await
5536 .unwrap()
5537 .0
5538 .read_with(cx, |tree, _| tree.id());
5539
5540 let buffer = project
5541 .update(cx, |project, cx| {
5542 project.open_buffer((worktree_id, "a.rs"), cx)
5543 })
5544 .await
5545 .unwrap();
5546
5547 project.update(cx, |project, cx| {
5548 project
5549 .update_buffer_diagnostics(
5550 &buffer,
5551 vec![
5552 DiagnosticEntry {
5553 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5554 diagnostic: Diagnostic {
5555 severity: DiagnosticSeverity::ERROR,
5556 message: "syntax error 1".to_string(),
5557 ..Default::default()
5558 },
5559 },
5560 DiagnosticEntry {
5561 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5562 diagnostic: Diagnostic {
5563 severity: DiagnosticSeverity::ERROR,
5564 message: "syntax error 2".to_string(),
5565 ..Default::default()
5566 },
5567 },
5568 ],
5569 None,
5570 cx,
5571 )
5572 .unwrap();
5573 });
5574
5575 // An empty range is extended forward to include the following character.
5576 // At the end of a line, an empty range is extended backward to include
5577 // the preceding character.
5578 buffer.read_with(cx, |buffer, _| {
5579 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5580 assert_eq!(
5581 chunks
5582 .iter()
5583 .map(|(s, d)| (s.as_str(), *d))
5584 .collect::<Vec<_>>(),
5585 &[
5586 ("let one = ", None),
5587 (";", Some(DiagnosticSeverity::ERROR)),
5588 ("\nlet two =", None),
5589 (" ", Some(DiagnosticSeverity::ERROR)),
5590 ("\nlet three = 3;\n", None)
5591 ]
5592 );
5593 });
5594 }
5595
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    // Rust language backed by a fake LSP server, so the test controls which
    // document version the server's edits are computed against.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw when the document was opened; the
    // LSP edits sent below will be tagged with this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret LSP edits that reference the older document version; they
    // must be translated to apply cleanly to the current buffer contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the interpreted edits must combine the server's changes with
    // the user's intervening edits, neither clobbering the other.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
5767
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to concrete points so they can
        // be compared against the expected minimal edits.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the server's large rewrite, only a minimal pair of edits
        // remains: the changed import text and the deleted duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
5897
5898 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5899 buffer: &Buffer,
5900 range: Range<T>,
5901 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5902 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5903 for chunk in buffer.snapshot().chunks(range, true) {
5904 if chunks.last().map_or(false, |prev_chunk| {
5905 prev_chunk.1 == chunk.diagnostic_severity
5906 }) {
5907 chunks.last_mut().unwrap().0.push_str(chunk.text);
5908 } else {
5909 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5910 }
5911 }
5912 chunks
5913 }
5914
5915 #[gpui::test]
5916 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5917 let dir = temp_tree(json!({
5918 "root": {
5919 "dir1": {},
5920 "dir2": {
5921 "dir3": {}
5922 }
5923 }
5924 }));
5925
5926 let project = Project::test(Arc::new(RealFs), cx);
5927 let (tree, _) = project
5928 .update(cx, |project, cx| {
5929 project.find_or_create_local_worktree(&dir.path(), true, cx)
5930 })
5931 .await
5932 .unwrap();
5933
5934 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5935 .await;
5936
5937 let cancel_flag = Default::default();
5938 let results = project
5939 .read_with(cx, |project, cx| {
5940 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5941 })
5942 .await;
5943
5944 assert!(results.is_empty());
5945 }
5946
#[gpui::test]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Rust language with a fake LSP server that will answer the
    // go-to-definition request below.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    // Only b.rs belongs to the (single-file) worktree; a.rs lives outside it.
    let (tree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir/b.rs", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = tree.read_with(cx, |tree, _| tree.id());
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at offset 22 in b.rs is
    // defined in a.rs — a file that is not yet part of any worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // Resolving the definition added a second, non-visible worktree
        // for a.rs alongside the original visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the last handle to the definition releases the non-visible
    // worktree that was created for it.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
6051
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // TypeScript language (no grammar needed) with a fake LSP server.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let (tree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = tree.read_with(cx, |tree, _| tree.id());
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    let buffer = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying a command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
6178
6179 #[gpui::test]
6180 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6181 let fs = FakeFs::new(cx.background());
6182 fs.insert_tree(
6183 "/dir",
6184 json!({
6185 "file1": "the old contents",
6186 }),
6187 )
6188 .await;
6189
6190 let project = Project::test(fs.clone(), cx);
6191 let worktree_id = project
6192 .update(cx, |p, cx| {
6193 p.find_or_create_local_worktree("/dir", true, cx)
6194 })
6195 .await
6196 .unwrap()
6197 .0
6198 .read_with(cx, |tree, _| tree.id());
6199
6200 let buffer = project
6201 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6202 .await
6203 .unwrap();
6204 buffer
6205 .update(cx, |buffer, cx| {
6206 assert_eq!(buffer.text(), "the old contents");
6207 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6208 buffer.save(cx)
6209 })
6210 .await
6211 .unwrap();
6212
6213 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6214 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6215 }
6216
6217 #[gpui::test]
6218 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6219 let fs = FakeFs::new(cx.background());
6220 fs.insert_tree(
6221 "/dir",
6222 json!({
6223 "file1": "the old contents",
6224 }),
6225 )
6226 .await;
6227
6228 let project = Project::test(fs.clone(), cx);
6229 let worktree_id = project
6230 .update(cx, |p, cx| {
6231 p.find_or_create_local_worktree("/dir/file1", true, cx)
6232 })
6233 .await
6234 .unwrap()
6235 .0
6236 .read_with(cx, |tree, _| tree.id());
6237
6238 let buffer = project
6239 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6240 .await
6241 .unwrap();
6242 buffer
6243 .update(cx, |buffer, cx| {
6244 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6245 buffer.save(cx)
6246 })
6247 .await
6248 .unwrap();
6249
6250 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6251 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6252 }
6253
6254 #[gpui::test]
6255 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6256 let fs = FakeFs::new(cx.background());
6257 fs.insert_tree("/dir", json!({})).await;
6258
6259 let project = Project::test(fs.clone(), cx);
6260 let (worktree, _) = project
6261 .update(cx, |project, cx| {
6262 project.find_or_create_local_worktree("/dir", true, cx)
6263 })
6264 .await
6265 .unwrap();
6266 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6267
6268 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6269 buffer.update(cx, |buffer, cx| {
6270 buffer.edit([0..0], "abc", cx);
6271 assert!(buffer.is_dirty());
6272 assert!(!buffer.has_conflict());
6273 });
6274 project
6275 .update(cx, |project, cx| {
6276 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6277 })
6278 .await
6279 .unwrap();
6280 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6281 buffer.read_with(cx, |buffer, cx| {
6282 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6283 assert!(!buffer.is_dirty());
6284 assert!(!buffer.has_conflict());
6285 });
6286
6287 let opened_buffer = project
6288 .update(cx, |project, cx| {
6289 project.open_buffer((worktree_id, "file1"), cx)
6290 })
6291 .await
6292 .unwrap();
6293 assert_eq!(opened_buffer, buffer);
6294 }
6295
6296 #[gpui::test(retries = 5)]
6297 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6298 let dir = temp_tree(json!({
6299 "a": {
6300 "file1": "",
6301 "file2": "",
6302 "file3": "",
6303 },
6304 "b": {
6305 "c": {
6306 "file4": "",
6307 "file5": "",
6308 }
6309 }
6310 }));
6311
6312 let project = Project::test(Arc::new(RealFs), cx);
6313 let rpc = project.read_with(cx, |p, _| p.client.clone());
6314
6315 let (tree, _) = project
6316 .update(cx, |p, cx| {
6317 p.find_or_create_local_worktree(dir.path(), true, cx)
6318 })
6319 .await
6320 .unwrap();
6321 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6322
6323 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6324 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6325 async move { buffer.await.unwrap() }
6326 };
6327 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6328 tree.read_with(cx, |tree, _| {
6329 tree.entry_for_path(path)
6330 .expect(&format!("no entry for path {}", path))
6331 .id
6332 })
6333 };
6334
6335 let buffer2 = buffer_for_path("a/file2", cx).await;
6336 let buffer3 = buffer_for_path("a/file3", cx).await;
6337 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6338 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6339
6340 let file2_id = id_for_path("a/file2", &cx);
6341 let file3_id = id_for_path("a/file3", &cx);
6342 let file4_id = id_for_path("b/c/file4", &cx);
6343
6344 // Wait for the initial scan.
6345 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6346 .await;
6347
6348 // Create a remote copy of this worktree.
6349 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6350 let (remote, load_task) = cx.update(|cx| {
6351 Worktree::remote(
6352 1,
6353 1,
6354 initial_snapshot.to_proto(&Default::default(), true),
6355 rpc.clone(),
6356 cx,
6357 )
6358 });
6359 load_task.await;
6360
6361 cx.read(|cx| {
6362 assert!(!buffer2.read(cx).is_dirty());
6363 assert!(!buffer3.read(cx).is_dirty());
6364 assert!(!buffer4.read(cx).is_dirty());
6365 assert!(!buffer5.read(cx).is_dirty());
6366 });
6367
6368 // Rename and delete files and directories.
6369 tree.flush_fs_events(&cx).await;
6370 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6371 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6372 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6373 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6374 tree.flush_fs_events(&cx).await;
6375
6376 let expected_paths = vec![
6377 "a",
6378 "a/file1",
6379 "a/file2.new",
6380 "b",
6381 "d",
6382 "d/file3",
6383 "d/file4",
6384 ];
6385
6386 cx.read(|app| {
6387 assert_eq!(
6388 tree.read(app)
6389 .paths()
6390 .map(|p| p.to_str().unwrap())
6391 .collect::<Vec<_>>(),
6392 expected_paths
6393 );
6394
6395 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6396 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6397 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6398
6399 assert_eq!(
6400 buffer2.read(app).file().unwrap().path().as_ref(),
6401 Path::new("a/file2.new")
6402 );
6403 assert_eq!(
6404 buffer3.read(app).file().unwrap().path().as_ref(),
6405 Path::new("d/file3")
6406 );
6407 assert_eq!(
6408 buffer4.read(app).file().unwrap().path().as_ref(),
6409 Path::new("d/file4")
6410 );
6411 assert_eq!(
6412 buffer5.read(app).file().unwrap().path().as_ref(),
6413 Path::new("b/c/file5")
6414 );
6415
6416 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6417 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6418 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6419 assert!(buffer5.read(app).file().unwrap().is_deleted());
6420 });
6421
6422 // Update the remote worktree. Check that it becomes consistent with the
6423 // local worktree.
6424 remote.update(cx, |remote, cx| {
6425 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6426 &initial_snapshot,
6427 1,
6428 1,
6429 true,
6430 );
6431 remote
6432 .as_remote_mut()
6433 .unwrap()
6434 .snapshot
6435 .apply_remote_update(update_message)
6436 .unwrap();
6437
6438 assert_eq!(
6439 remote
6440 .paths()
6441 .map(|p| p.to_str().unwrap())
6442 .collect::<Vec<_>>(),
6443 expected_paths
6444 );
6445 });
6446 }
6447
6448 #[gpui::test]
6449 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6450 let fs = FakeFs::new(cx.background());
6451 fs.insert_tree(
6452 "/the-dir",
6453 json!({
6454 "a.txt": "a-contents",
6455 "b.txt": "b-contents",
6456 }),
6457 )
6458 .await;
6459
6460 let project = Project::test(fs.clone(), cx);
6461 let worktree_id = project
6462 .update(cx, |p, cx| {
6463 p.find_or_create_local_worktree("/the-dir", true, cx)
6464 })
6465 .await
6466 .unwrap()
6467 .0
6468 .read_with(cx, |tree, _| tree.id());
6469
6470 // Spawn multiple tasks to open paths, repeating some paths.
6471 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6472 (
6473 p.open_buffer((worktree_id, "a.txt"), cx),
6474 p.open_buffer((worktree_id, "b.txt"), cx),
6475 p.open_buffer((worktree_id, "a.txt"), cx),
6476 )
6477 });
6478
6479 let buffer_a_1 = buffer_a_1.await.unwrap();
6480 let buffer_a_2 = buffer_a_2.await.unwrap();
6481 let buffer_b = buffer_b.await.unwrap();
6482 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6483 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6484
6485 // There is only one buffer per path.
6486 let buffer_a_id = buffer_a_1.id();
6487 assert_eq!(buffer_a_2.id(), buffer_a_id);
6488
6489 // Open the same path again while it is still open.
6490 drop(buffer_a_1);
6491 let buffer_a_3 = project
6492 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6493 .await
6494 .unwrap();
6495
6496 // There's still only one buffer per path.
6497 assert_eq!(buffer_a_3.id(), buffer_a_id);
6498 }
6499
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        // Verifies the buffer dirty-state lifecycle: editing dirties the
        // buffer, saving cleans it, and deleting the file on disk dirties it
        // (unless it was already dirty, in which case no Dirtied is emitted).
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        // Drain pending fs events and wait for the initial scan, so that
        // later events are attributable to this test's own mutations.
        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Accumulates every observed buffer event for the assertions below.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    // Operation events are noise for this test; ignore them.
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            // Simulate a save by reporting the current version as persisted.
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        // Dirty the buffer first, then discard the events that edit produced,
        // so the assertion below sees only the deletion's events.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
6639
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // Verifies how an open buffer reacts to its backing file changing on
        // disk: a clean buffer reloads to the new contents; a dirty buffer
        // keeps its edits and is marked as conflicted instead.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        // Wait for the initial scan so subsequent fs events are observed.
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        // After the reload the buffer is still clean and conflict-free.
        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
6737
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        // Verifies that LSP diagnostics connected via `related_information`
        // are grouped: each group gets one primary entry plus its hints,
        // sharing a `group_id`, and can be fetched via `diagnostic_group`.
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Build a publishDiagnostics message with two groups:
        // - "error 1" (warning) with one related hint, and
        // - "error 2" (error) with two related hints.
        // Hints point back at their primary via related_information.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics in range, ordered by position: group 0 is the
        // warning and its hint; group 1 is the error and its two hints.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Fetching each group by id yields exactly its members.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
6996
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Exercises the rename flow end-to-end against a fake LSP server:
        // prepare_rename resolves the symbol's range, then perform_rename
        // applies a multi-file WorkspaceEdit to the affected buffers.
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        // Stream of fake language servers spawned for this language.
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a .rs buffer starts the fake language server.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Issue prepare_rename at offset 7 (inside "ONE"), then answer it
        // from the fake server with the symbol's range.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the rename; the fake server responds with edits to both
        // files, replacing "ONE" with "THREE" at each reference.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The resulting transaction touches both buffers; verify each one's
        // text after the edits were applied.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
7137
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        // Verifies project-wide text search, both over on-disk files and over
        // an open buffer with unsaved edits (the buffer's current contents
        // take precedence over the file on disk).
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), cx);
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Whole-word, case-insensitive search over files on disk.
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        // Edit an open buffer without saving; search must reflect the
        // in-memory contents, not the file on disk.
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "four.rs"), cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            buffer.edit([20..28, 31..43], "two::TWO", cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        // Runs `query` against the project and flattens the results into a
        // map of buffer path -> matched offset ranges, for easy assertions.
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
7217}