1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use sha2::{Digest, Sha256};
32use similar::{ChangeTag, TextDiff};
33use std::{
34 cell::RefCell,
35 cmp::{self, Ordering},
36 convert::TryInto,
37 hash::Hash,
38 mem,
39 ops::Range,
40 path::{Component, Path, PathBuf},
41 rc::Rc,
42 sync::{
43 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
44 Arc,
45 },
46 time::Instant,
47};
48use util::{post_inc, ResultExt, TryFutureExt as _};
49
50pub use fs::*;
51pub use worktree::*;
52
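/// An entity that can be associated with an entry in one of the project's
/// worktrees, such as a buffer that is backed by a file on disk.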
53pub trait Item: Entity {
54 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
55}
56
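/// The central model of this crate: a set of worktrees together with the
/// buffers, language servers, diagnostics, and collaborators associated with
/// them. A project is either local (backed by the local file system and able
/// to be shared with guests) or remote (a replica of a project hosted by
/// another peer), as described by its `ProjectClientState`.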
57pub struct Project {
58 worktrees: Vec<WorktreeHandle>,
59 active_entry: Option<ProjectEntryId>,
60 languages: Arc<LanguageRegistry>,
61 language_servers:
62 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
63 started_language_servers:
64 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
65 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
66 language_server_settings: Arc<Mutex<serde_json::Value>>,
67 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
68 next_language_server_id: usize,
69 client: Arc<client::Client>,
70 next_entry_id: Arc<AtomicUsize>,
71 user_store: ModelHandle<UserStore>,
72 fs: Arc<dyn Fs>,
73 client_state: ProjectClientState,
74 collaborators: HashMap<PeerId, Collaborator>,
75 subscriptions: Vec<client::Subscription>,
76 language_servers_with_diagnostics_running: isize,
77 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
78 shared_buffers: HashMap<PeerId, HashSet<u64>>,
79 loading_buffers: HashMap<
80 ProjectPath,
81 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
82 >,
83 loading_local_worktrees:
84 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
85 opened_buffers: HashMap<u64, OpenBuffer>,
86 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
87 nonce: u128,
88}
89
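/// How the project holds on to an open buffer. Shared and remote projects keep
/// strong handles so buffers stay alive for collaborators; unshared local
/// projects keep weak handles so buffers can be dropped when no longer in use.
/// `Loading` accumulates operations that arrive before the buffer has finished
/// loading, to be applied once it is registered.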
90enum OpenBuffer {
91 Strong(ModelHandle<Buffer>),
92 Weak(WeakModelHandle<Buffer>),
93 Loading(Vec<Operation>),
94}
95
96enum WorktreeHandle {
97 Strong(ModelHandle<Worktree>),
98 Weak(WeakModelHandle<Worktree>),
99}
100
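/// Connection-related state for the project. Local projects track their
/// server-assigned remote id through a watch channel and whether they are
/// currently shared; remote projects track the host's project id, this
/// replica's id, and whether the host has stopped sharing.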
101enum ProjectClientState {
102 Local {
103 is_shared: bool,
104 remote_id_tx: watch::Sender<Option<u64>>,
105 remote_id_rx: watch::Receiver<Option<u64>>,
106 _maintain_remote_id_task: Task<Option<()>>,
107 },
108 Remote {
109 sharing_has_stopped: bool,
110 remote_id: u64,
111 replica_id: ReplicaId,
112 _detect_unshare_task: Task<Option<()>>,
113 },
114}
115
116#[derive(Clone, Debug)]
117pub struct Collaborator {
118 pub user: Arc<User>,
119 pub peer_id: PeerId,
120 pub replica_id: ReplicaId,
121}
122
123#[derive(Clone, Debug, PartialEq)]
124pub enum Event {
125 ActiveEntryChanged(Option<ProjectEntryId>),
126 WorktreeRemoved(WorktreeId),
127 DiskBasedDiagnosticsStarted,
128 DiskBasedDiagnosticsUpdated,
129 DiskBasedDiagnosticsFinished,
130 DiagnosticsUpdated(ProjectPath),
131 RemoteIdChanged(Option<u64>),
132 CollaboratorLeft(PeerId),
133}
134
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
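/// Per-path counts of primary diagnostics, grouped by severity.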
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_server_name: LanguageServerName,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
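/// The result of an operation (such as a workspace edit or formatting) that
/// may touch several buffers: a map from each affected buffer to the
/// transaction that was applied to it.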
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
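/// An identifier for an entry within the project's worktrees, allocated from a
/// shared `AtomicUsize` counter.
///
/// A minimal sketch of how ids are produced (the counter here is hypothetical;
/// the project owns the real one in `next_entry_id`):
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let first = ProjectEntryId::new(&counter);
/// let second = ProjectEntryId::new(&counter);
/// assert_eq!(first.to_usize(), 0);
/// assert_eq!(second.to_usize(), 1);
/// ```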
225#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
226pub struct ProjectEntryId(usize);
227
228impl ProjectEntryId {
229 pub fn new(counter: &AtomicUsize) -> Self {
230 Self(counter.fetch_add(1, SeqCst))
231 }
232
233 pub fn from_proto(id: u64) -> Self {
234 Self(id as usize)
235 }
236
237 pub fn to_proto(&self) -> u64 {
238 self.0 as u64
239 }
240
241 pub fn to_usize(&self) -> usize {
242 self.0
243 }
244}
245
246impl Project {
247 pub fn init(client: &Arc<Client>) {
248 client.add_model_message_handler(Self::handle_add_collaborator);
249 client.add_model_message_handler(Self::handle_buffer_reloaded);
250 client.add_model_message_handler(Self::handle_buffer_saved);
251 client.add_model_message_handler(Self::handle_start_language_server);
252 client.add_model_message_handler(Self::handle_update_language_server);
253 client.add_model_message_handler(Self::handle_remove_collaborator);
254 client.add_model_message_handler(Self::handle_register_worktree);
255 client.add_model_message_handler(Self::handle_unregister_worktree);
256 client.add_model_message_handler(Self::handle_unshare_project);
257 client.add_model_message_handler(Self::handle_update_buffer_file);
258 client.add_model_message_handler(Self::handle_update_buffer);
259 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
260 client.add_model_message_handler(Self::handle_update_worktree);
261 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
262 client.add_model_request_handler(Self::handle_apply_code_action);
263 client.add_model_request_handler(Self::handle_format_buffers);
264 client.add_model_request_handler(Self::handle_get_code_actions);
265 client.add_model_request_handler(Self::handle_get_completions);
266 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
269 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
271 client.add_model_request_handler(Self::handle_search_project);
272 client.add_model_request_handler(Self::handle_get_project_symbols);
273 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
274 client.add_model_request_handler(Self::handle_open_buffer_by_id);
275 client.add_model_request_handler(Self::handle_open_buffer_by_path);
276 client.add_model_request_handler(Self::handle_save_buffer);
277 }
278
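    /// Creates a project that operates directly on the local file system.
    ///
    /// A minimal sketch of constructing one, assuming a `Client`, a
    /// `UserStore`, a `LanguageRegistry`, and an `Fs` implementation are
    /// already in hand:
    ///
    /// ```ignore
    /// let project = Project::local(client, user_store, languages, fs, cx);
    /// ```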
279 pub fn local(
280 client: Arc<Client>,
281 user_store: ModelHandle<UserStore>,
282 languages: Arc<LanguageRegistry>,
283 fs: Arc<dyn Fs>,
284 cx: &mut MutableAppContext,
285 ) -> ModelHandle<Self> {
286 cx.add_model(|cx: &mut ModelContext<Self>| {
287 let (remote_id_tx, remote_id_rx) = watch::channel();
288 let _maintain_remote_id_task = cx.spawn_weak({
289 let rpc = client.clone();
290 move |this, mut cx| {
291 async move {
292 let mut status = rpc.status();
293 while let Some(status) = status.next().await {
294 if let Some(this) = this.upgrade(&cx) {
295 if status.is_connected() {
296 this.update(&mut cx, |this, cx| this.register(cx)).await?;
297 } else {
298 this.update(&mut cx, |this, cx| this.unregister(cx));
299 }
300 }
301 }
302 Ok(())
303 }
304 .log_err()
305 }
306 });
307
308 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
309 Self {
310 worktrees: Default::default(),
311 collaborators: Default::default(),
312 opened_buffers: Default::default(),
313 shared_buffers: Default::default(),
314 loading_buffers: Default::default(),
315 loading_local_worktrees: Default::default(),
316 buffer_snapshots: Default::default(),
317 client_state: ProjectClientState::Local {
318 is_shared: false,
319 remote_id_tx,
320 remote_id_rx,
321 _maintain_remote_id_task,
322 },
323 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
324 subscriptions: Vec::new(),
325 active_entry: None,
326 languages,
327 client,
328 user_store,
329 fs,
330 next_entry_id: Default::default(),
331 language_servers_with_diagnostics_running: 0,
332 language_servers: Default::default(),
333 started_language_servers: Default::default(),
334 language_server_statuses: Default::default(),
335 last_workspace_edits_by_language_server: Default::default(),
336 language_server_settings: Default::default(),
337 next_language_server_id: 0,
338 nonce: StdRng::from_entropy().gen(),
339 }
340 })
341 }
342
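    /// Joins a project that is being shared by another peer, identified by its
    /// remote id. The returned project mirrors the host's worktrees, language
    /// server statuses, and collaborators.
    ///
    /// A minimal sketch, assuming the same dependencies as `Project::local`
    /// plus an `AsyncAppContext`:
    ///
    /// ```ignore
    /// let project =
    ///     Project::remote(remote_id, client, user_store, languages, fs, &mut cx).await?;
    /// ```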
343 pub async fn remote(
344 remote_id: u64,
345 client: Arc<Client>,
346 user_store: ModelHandle<UserStore>,
347 languages: Arc<LanguageRegistry>,
348 fs: Arc<dyn Fs>,
349 cx: &mut AsyncAppContext,
350 ) -> Result<ModelHandle<Self>> {
351 client.authenticate_and_connect(true, &cx).await?;
352
353 let response = client
354 .request(proto::JoinProject {
355 project_id: remote_id,
356 })
357 .await?;
358
359 let replica_id = response.replica_id as ReplicaId;
360
361 let mut worktrees = Vec::new();
362 for worktree in response.worktrees {
363 let (worktree, load_task) = cx
364 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
365 worktrees.push(worktree);
366 load_task.detach();
367 }
368
369 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
370 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
371 let mut this = Self {
372 worktrees: Vec::new(),
373 loading_buffers: Default::default(),
374 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
375 shared_buffers: Default::default(),
376 loading_local_worktrees: Default::default(),
377 active_entry: None,
378 collaborators: Default::default(),
379 languages,
380 user_store: user_store.clone(),
381 fs,
382 next_entry_id: Default::default(),
383 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
384 client: client.clone(),
385 client_state: ProjectClientState::Remote {
386 sharing_has_stopped: false,
387 remote_id,
388 replica_id,
389 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
390 async move {
391 let mut status = client.status();
392 let is_connected =
393 status.next().await.map_or(false, |s| s.is_connected());
394 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
395 if !is_connected || status.next().await.is_some() {
396 if let Some(this) = this.upgrade(&cx) {
397 this.update(&mut cx, |this, cx| this.project_unshared(cx))
398 }
399 }
400 Ok(())
401 }
402 .log_err()
403 }),
404 },
405 language_servers_with_diagnostics_running: 0,
406 language_servers: Default::default(),
407 started_language_servers: Default::default(),
408 language_server_settings: Default::default(),
409 language_server_statuses: response
410 .language_servers
411 .into_iter()
412 .map(|server| {
413 (
414 server.id as usize,
415 LanguageServerStatus {
416 name: server.name,
417 pending_work: Default::default(),
418 pending_diagnostic_updates: 0,
419 },
420 )
421 })
422 .collect(),
423 last_workspace_edits_by_language_server: Default::default(),
424 next_language_server_id: 0,
425 opened_buffers: Default::default(),
426 buffer_snapshots: Default::default(),
427 nonce: StdRng::from_entropy().gen(),
428 };
429 for worktree in worktrees {
430 this.add_worktree(&worktree, cx);
431 }
432 this
433 });
434
435 let user_ids = response
436 .collaborators
437 .iter()
438 .map(|peer| peer.user_id)
439 .collect();
440 user_store
441 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
442 .await?;
443 let mut collaborators = HashMap::default();
444 for message in response.collaborators {
445 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
446 collaborators.insert(collaborator.peer_id, collaborator);
447 }
448
449 this.update(cx, |this, _| {
450 this.collaborators = collaborators;
451 });
452
453 Ok(this)
454 }
455
456 #[cfg(any(test, feature = "test-support"))]
457 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
458 let languages = Arc::new(LanguageRegistry::test());
459 let http_client = client::test::FakeHttpClient::with_404_response();
460 let client = client::Client::new(http_client.clone());
461 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
462 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
463 }
464
465 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
466 self.opened_buffers
467 .get(&remote_id)
468 .and_then(|buffer| buffer.upgrade(cx))
469 }
470
471 #[cfg(any(test, feature = "test-support"))]
472 pub fn languages(&self) -> &Arc<LanguageRegistry> {
473 &self.languages
474 }
475
476 #[cfg(any(test, feature = "test-support"))]
477 pub fn check_invariants(&self, cx: &AppContext) {
478 if self.is_local() {
479 let mut worktree_root_paths = HashMap::default();
480 for worktree in self.worktrees(cx) {
481 let worktree = worktree.read(cx);
482 let abs_path = worktree.as_local().unwrap().abs_path().clone();
483 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
484 assert_eq!(
485 prev_worktree_id,
486 None,
487 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
488 abs_path,
489 worktree.id(),
490 prev_worktree_id
491 )
492 }
493 } else {
494 let replica_id = self.replica_id();
495 for buffer in self.opened_buffers.values() {
496 if let Some(buffer) = buffer.upgrade(cx) {
497 let buffer = buffer.read(cx);
498 assert_eq!(
499 buffer.deferred_ops_len(),
500 0,
501 "replica {}, buffer {} has deferred operations",
502 replica_id,
503 buffer.remote_id()
504 );
505 }
506 }
507 }
508 }
509
510 #[cfg(any(test, feature = "test-support"))]
511 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
512 let path = path.into();
513 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
514 self.opened_buffers.iter().any(|(_, buffer)| {
515 if let Some(buffer) = buffer.upgrade(cx) {
516 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
517 if file.worktree == worktree && file.path() == &path.path {
518 return true;
519 }
520 }
521 }
522 false
523 })
524 } else {
525 false
526 }
527 }
528
529 pub fn fs(&self) -> &Arc<dyn Fs> {
530 &self.fs
531 }
532
533 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
534 self.unshare(cx);
535 for worktree in &self.worktrees {
536 if let Some(worktree) = worktree.upgrade(cx) {
537 worktree.update(cx, |worktree, _| {
538 worktree.as_local_mut().unwrap().unregister();
539 });
540 }
541 }
542
543 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
544 *remote_id_tx.borrow_mut() = None;
545 }
546
547 self.subscriptions.clear();
548 }
549
550 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
551 self.unregister(cx);
552
553 let response = self.client.request(proto::RegisterProject {});
554 cx.spawn(|this, mut cx| async move {
555 let remote_id = response.await?.project_id;
556
557 let mut registrations = Vec::new();
558 this.update(&mut cx, |this, cx| {
559 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
560 *remote_id_tx.borrow_mut() = Some(remote_id);
561 }
562
563 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
564
565 this.subscriptions
566 .push(this.client.add_model_for_remote_entity(remote_id, cx));
567
568 for worktree in &this.worktrees {
569 if let Some(worktree) = worktree.upgrade(cx) {
570 registrations.push(worktree.update(cx, |worktree, cx| {
571 let worktree = worktree.as_local_mut().unwrap();
572 worktree.register(remote_id, cx)
573 }));
574 }
575 }
576 });
577
578 futures::future::try_join_all(registrations).await?;
579 Ok(())
580 })
581 }
582
583 pub fn remote_id(&self) -> Option<u64> {
584 match &self.client_state {
585 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
586 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
587 }
588 }
589
590 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
591 let mut id = None;
592 let mut watch = None;
593 match &self.client_state {
594 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
595 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
596 }
597
598 async move {
599 if let Some(id) = id {
600 return id;
601 }
602 let mut watch = watch.unwrap();
603 loop {
604 let id = *watch.borrow();
605 if let Some(id) = id {
606 return id;
607 }
608 watch.next().await;
609 }
610 }
611 }
612
613 pub fn replica_id(&self) -> ReplicaId {
614 match &self.client_state {
615 ProjectClientState::Local { .. } => 0,
616 ProjectClientState::Remote { replica_id, .. } => *replica_id,
617 }
618 }
619
620 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
621 &self.collaborators
622 }
623
624 pub fn worktrees<'a>(
625 &'a self,
626 cx: &'a AppContext,
627 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
628 self.worktrees
629 .iter()
630 .filter_map(move |worktree| worktree.upgrade(cx))
631 }
632
633 pub fn visible_worktrees<'a>(
634 &'a self,
635 cx: &'a AppContext,
636 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
637 self.worktrees.iter().filter_map(|worktree| {
638 worktree.upgrade(cx).and_then(|worktree| {
639 if worktree.read(cx).is_visible() {
640 Some(worktree)
641 } else {
642 None
643 }
644 })
645 })
646 }
647
648 pub fn worktree_for_id(
649 &self,
650 id: WorktreeId,
651 cx: &AppContext,
652 ) -> Option<ModelHandle<Worktree>> {
653 self.worktrees(cx)
654 .find(|worktree| worktree.read(cx).id() == id)
655 }
656
657 pub fn worktree_for_entry(
658 &self,
659 entry_id: ProjectEntryId,
660 cx: &AppContext,
661 ) -> Option<ModelHandle<Worktree>> {
662 self.worktrees(cx)
663 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
664 }
665
666 pub fn worktree_id_for_entry(
667 &self,
668 entry_id: ProjectEntryId,
669 cx: &AppContext,
670 ) -> Option<WorktreeId> {
671 self.worktree_for_entry(entry_id, cx)
672 .map(|worktree| worktree.read(cx).id())
673 }
674
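    /// Shares this local project with collaborators: upgrades weak buffer and
    /// worktree handles to strong ones so they stay alive for guests, requests
    /// `ShareProject` from the server, and then shares each worktree.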
675 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
676 let rpc = self.client.clone();
677 cx.spawn(|this, mut cx| async move {
678 let project_id = this.update(&mut cx, |this, cx| {
679 if let ProjectClientState::Local {
680 is_shared,
681 remote_id_rx,
682 ..
683 } = &mut this.client_state
684 {
685 *is_shared = true;
686
687 for open_buffer in this.opened_buffers.values_mut() {
688 match open_buffer {
689 OpenBuffer::Strong(_) => {}
690 OpenBuffer::Weak(buffer) => {
691 if let Some(buffer) = buffer.upgrade(cx) {
692 *open_buffer = OpenBuffer::Strong(buffer);
693 }
694 }
695 OpenBuffer::Loading(_) => unreachable!(),
696 }
697 }
698
699 for worktree_handle in this.worktrees.iter_mut() {
700 match worktree_handle {
701 WorktreeHandle::Strong(_) => {}
702 WorktreeHandle::Weak(worktree) => {
703 if let Some(worktree) = worktree.upgrade(cx) {
704 *worktree_handle = WorktreeHandle::Strong(worktree);
705 }
706 }
707 }
708 }
709
710 remote_id_rx
711 .borrow()
712 .ok_or_else(|| anyhow!("no project id"))
713 } else {
714 Err(anyhow!("can't share a remote project"))
715 }
716 })?;
717
718 rpc.request(proto::ShareProject { project_id }).await?;
719
720 let mut tasks = Vec::new();
721 this.update(&mut cx, |this, cx| {
722 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
723 worktree.update(cx, |worktree, cx| {
724 let worktree = worktree.as_local_mut().unwrap();
725 tasks.push(worktree.share(project_id, cx));
726 });
727 }
728 });
729 for task in tasks {
730 task.await?;
731 }
732 this.update(&mut cx, |_, cx| cx.notify());
733 Ok(())
734 })
735 }
736
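    /// Stops sharing this local project: clears collaborators and shared
    /// buffers, unshares each worktree (downgrading handles for worktrees that
    /// are not visible), and notifies the server.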
737 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
738 let rpc = self.client.clone();
739
740 if let ProjectClientState::Local {
741 is_shared,
742 remote_id_rx,
743 ..
744 } = &mut self.client_state
745 {
746 if !*is_shared {
747 return;
748 }
749
750 *is_shared = false;
751 self.collaborators.clear();
752 self.shared_buffers.clear();
753 for worktree_handle in self.worktrees.iter_mut() {
754 if let WorktreeHandle::Strong(worktree) = worktree_handle {
755 let is_visible = worktree.update(cx, |worktree, _| {
756 worktree.as_local_mut().unwrap().unshare();
757 worktree.is_visible()
758 });
759 if !is_visible {
760 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
761 }
762 }
763 }
764
765 for open_buffer in self.opened_buffers.values_mut() {
766 match open_buffer {
767 OpenBuffer::Strong(buffer) => {
768 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
769 }
770 _ => {}
771 }
772 }
773
774 if let Some(project_id) = *remote_id_rx.borrow() {
775 rpc.send(proto::UnshareProject { project_id }).log_err();
776 }
777
778 cx.notify();
779 } else {
780 log::error!("attempted to unshare a remote project");
781 }
782 }
783
784 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
785 if let ProjectClientState::Remote {
786 sharing_has_stopped,
787 ..
788 } = &mut self.client_state
789 {
790 *sharing_has_stopped = true;
791 self.collaborators.clear();
792 cx.notify();
793 }
794 }
795
796 pub fn is_read_only(&self) -> bool {
797 match &self.client_state {
798 ProjectClientState::Local { .. } => false,
799 ProjectClientState::Remote {
800 sharing_has_stopped,
801 ..
802 } => *sharing_has_stopped,
803 }
804 }
805
806 pub fn is_local(&self) -> bool {
807 match &self.client_state {
808 ProjectClientState::Local { .. } => true,
809 ProjectClientState::Remote { .. } => false,
810 }
811 }
812
813 pub fn is_remote(&self) -> bool {
814 !self.is_local()
815 }
816
817 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
818 if self.is_remote() {
819 return Err(anyhow!("creating buffers as a guest is not supported yet"));
820 }
821
822 let buffer = cx.add_model(|cx| {
823 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
824 });
825 self.register_buffer(&buffer, cx)?;
826 Ok(buffer)
827 }
828
829 pub fn open_path(
830 &mut self,
831 path: impl Into<ProjectPath>,
832 cx: &mut ModelContext<Self>,
833 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
834 let task = self.open_buffer(path, cx);
835 cx.spawn_weak(|_, cx| async move {
836 let buffer = task.await?;
837 let project_entry_id = buffer
838 .read_with(&cx, |buffer, cx| {
839 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
840 })
841 .ok_or_else(|| anyhow!("no project entry"))?;
842 Ok((project_entry_id, buffer.into()))
843 })
844 }
845
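    /// Opens the buffer for the given project path, reusing an already-open
    /// buffer or an in-flight load for the same path when possible. Local
    /// worktrees load the file from disk; remote worktrees request the buffer
    /// from the host.
    ///
    /// A minimal usage sketch, assuming a known `worktree_id` for an existing
    /// worktree (the path shown is only an example):
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```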
846 pub fn open_buffer(
847 &mut self,
848 path: impl Into<ProjectPath>,
849 cx: &mut ModelContext<Self>,
850 ) -> Task<Result<ModelHandle<Buffer>>> {
851 let project_path = path.into();
852 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
853 worktree
854 } else {
855 return Task::ready(Err(anyhow!("no such worktree")));
856 };
857
858 // If there is already a buffer for the given path, then return it.
859 let existing_buffer = self.get_open_buffer(&project_path, cx);
860 if let Some(existing_buffer) = existing_buffer {
861 return Task::ready(Ok(existing_buffer));
862 }
863
864 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
865 // If the given path is already being loaded, then wait for that existing
866 // task to complete and return the same buffer.
867 hash_map::Entry::Occupied(e) => e.get().clone(),
868
869 // Otherwise, record the fact that this path is now being loaded.
870 hash_map::Entry::Vacant(entry) => {
871 let (mut tx, rx) = postage::watch::channel();
872 entry.insert(rx.clone());
873
874 let load_buffer = if worktree.read(cx).is_local() {
875 self.open_local_buffer(&project_path.path, &worktree, cx)
876 } else {
877 self.open_remote_buffer(&project_path.path, &worktree, cx)
878 };
879
880 cx.spawn(move |this, mut cx| async move {
881 let load_result = load_buffer.await;
882 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
883 // Record the fact that the buffer is no longer loading.
884 this.loading_buffers.remove(&project_path);
885 let buffer = load_result.map_err(Arc::new)?;
886 Ok(buffer)
887 }));
888 })
889 .detach();
890 rx
891 }
892 };
893
894 cx.foreground().spawn(async move {
895 loop {
896 if let Some(result) = loading_watch.borrow().as_ref() {
897 match result {
898 Ok(buffer) => return Ok(buffer.clone()),
899 Err(error) => return Err(anyhow!("{}", error)),
900 }
901 }
902 loading_watch.next().await;
903 }
904 })
905 }
906
907 fn open_local_buffer(
908 &mut self,
909 path: &Arc<Path>,
910 worktree: &ModelHandle<Worktree>,
911 cx: &mut ModelContext<Self>,
912 ) -> Task<Result<ModelHandle<Buffer>>> {
913 let load_buffer = worktree.update(cx, |worktree, cx| {
914 let worktree = worktree.as_local_mut().unwrap();
915 worktree.load_buffer(path, cx)
916 });
917 cx.spawn(|this, mut cx| async move {
918 let buffer = load_buffer.await?;
919 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
920 Ok(buffer)
921 })
922 }
923
924 fn open_remote_buffer(
925 &mut self,
926 path: &Arc<Path>,
927 worktree: &ModelHandle<Worktree>,
928 cx: &mut ModelContext<Self>,
929 ) -> Task<Result<ModelHandle<Buffer>>> {
930 let rpc = self.client.clone();
931 let project_id = self.remote_id().unwrap();
932 let remote_worktree_id = worktree.read(cx).id();
933 let path = path.clone();
934 let path_string = path.to_string_lossy().to_string();
935 cx.spawn(|this, mut cx| async move {
936 let response = rpc
937 .request(proto::OpenBufferByPath {
938 project_id,
939 worktree_id: remote_worktree_id.to_proto(),
940 path: path_string,
941 })
942 .await?;
943 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
944 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
945 .await
946 })
947 }
948
949 fn open_local_buffer_via_lsp(
950 &mut self,
951 abs_path: lsp::Url,
952 lsp_adapter: Arc<dyn LspAdapter>,
953 lsp_server: Arc<LanguageServer>,
954 cx: &mut ModelContext<Self>,
955 ) -> Task<Result<ModelHandle<Buffer>>> {
956 cx.spawn(|this, mut cx| async move {
957 let abs_path = abs_path
958 .to_file_path()
959 .map_err(|_| anyhow!("can't convert URI to path"))?;
960 let (worktree, relative_path) = if let Some(result) =
961 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
962 {
963 result
964 } else {
965 let worktree = this
966 .update(&mut cx, |this, cx| {
967 this.create_local_worktree(&abs_path, false, cx)
968 })
969 .await?;
970 this.update(&mut cx, |this, cx| {
971 this.language_servers.insert(
972 (worktree.read(cx).id(), lsp_adapter.name()),
973 (lsp_adapter, lsp_server),
974 );
975 });
976 (worktree, PathBuf::new())
977 };
978
979 let project_path = ProjectPath {
980 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
981 path: relative_path.into(),
982 };
983 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
984 .await
985 })
986 }
987
988 pub fn open_buffer_by_id(
989 &mut self,
990 id: u64,
991 cx: &mut ModelContext<Self>,
992 ) -> Task<Result<ModelHandle<Buffer>>> {
993 if let Some(buffer) = self.buffer_for_id(id, cx) {
994 Task::ready(Ok(buffer))
995 } else if self.is_local() {
996 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
997 } else if let Some(project_id) = self.remote_id() {
998 let request = self
999 .client
1000 .request(proto::OpenBufferById { project_id, id });
1001 cx.spawn(|this, mut cx| async move {
1002 let buffer = request
1003 .await?
1004 .buffer
1005 .ok_or_else(|| anyhow!("invalid buffer"))?;
1006 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1007 .await
1008 })
1009 } else {
1010 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1011 }
1012 }
1013
1014 pub fn save_buffer_as(
1015 &mut self,
1016 buffer: ModelHandle<Buffer>,
1017 abs_path: PathBuf,
1018 cx: &mut ModelContext<Project>,
1019 ) -> Task<Result<()>> {
1020 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1021 cx.spawn(|this, mut cx| async move {
1022 let (worktree, path) = worktree_task.await?;
1023 worktree
1024 .update(&mut cx, |worktree, cx| {
1025 worktree
1026 .as_local_mut()
1027 .unwrap()
1028 .save_buffer_as(buffer.clone(), path, cx)
1029 })
1030 .await?;
1031 this.update(&mut cx, |this, cx| {
1032 this.assign_language_to_buffer(&buffer, cx);
1033 this.register_buffer_with_language_server(&buffer, cx);
1034 });
1035 Ok(())
1036 })
1037 }
1038
1039 pub fn get_open_buffer(
1040 &mut self,
1041 path: &ProjectPath,
1042 cx: &mut ModelContext<Self>,
1043 ) -> Option<ModelHandle<Buffer>> {
1044 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1045 self.opened_buffers.values().find_map(|buffer| {
1046 let buffer = buffer.upgrade(cx)?;
1047 let file = File::from_dyn(buffer.read(cx).file())?;
1048 if file.worktree == worktree && file.path() == &path.path {
1049 Some(buffer)
1050 } else {
1051 None
1052 }
1053 })
1054 }
1055
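    /// Records a newly opened buffer under its remote id, applying any
    /// operations that were queued while it was loading, and wires it up to
    /// buffer events, language detection, and the language server.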
1056 fn register_buffer(
1057 &mut self,
1058 buffer: &ModelHandle<Buffer>,
1059 cx: &mut ModelContext<Self>,
1060 ) -> Result<()> {
1061 let remote_id = buffer.read(cx).remote_id();
1062 let open_buffer = if self.is_remote() || self.is_shared() {
1063 OpenBuffer::Strong(buffer.clone())
1064 } else {
1065 OpenBuffer::Weak(buffer.downgrade())
1066 };
1067
1068 match self.opened_buffers.insert(remote_id, open_buffer) {
1069 None => {}
1070 Some(OpenBuffer::Loading(operations)) => {
1071 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1072 }
1073 Some(OpenBuffer::Weak(existing_handle)) => {
1074 if existing_handle.upgrade(cx).is_some() {
1075 Err(anyhow!(
1076 "already registered buffer with remote id {}",
1077 remote_id
1078 ))?
1079 }
1080 }
1081 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?,
1085 }
1086 cx.subscribe(buffer, |this, buffer, event, cx| {
1087 this.on_buffer_event(buffer, event, cx);
1088 })
1089 .detach();
1090
1091 self.assign_language_to_buffer(buffer, cx);
1092 self.register_buffer_with_language_server(buffer, cx);
1093
1094 Ok(())
1095 }
1096
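    /// If the buffer is a local file with a running language server, sends a
    /// `textDocument/didOpen` notification, seeds the buffer's snapshot history
    /// at version 0, and arranges for `textDocument/didClose` to be sent when
    /// the buffer is released.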
1097 fn register_buffer_with_language_server(
1098 &mut self,
1099 buffer_handle: &ModelHandle<Buffer>,
1100 cx: &mut ModelContext<Self>,
1101 ) {
1102 let buffer = buffer_handle.read(cx);
1103 let buffer_id = buffer.remote_id();
1104 if let Some(file) = File::from_dyn(buffer.file()) {
1105 if file.is_local() {
1106 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1107 let initial_snapshot = buffer.text_snapshot();
1108 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1109
1110 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1111 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1112 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1113 .log_err();
1114 }
1115 }
1116
1117 if let Some((_, server)) = language_server {
1118 server
1119 .notify::<lsp::notification::DidOpenTextDocument>(
1120 lsp::DidOpenTextDocumentParams {
1121 text_document: lsp::TextDocumentItem::new(
1122 uri,
1123 Default::default(),
1124 0,
1125 initial_snapshot.text(),
1126 ),
                            },
1129 )
1130 .log_err();
1131 buffer_handle.update(cx, |buffer, cx| {
1132 buffer.set_completion_triggers(
1133 server
1134 .capabilities()
1135 .completion_provider
1136 .as_ref()
1137 .and_then(|provider| provider.trigger_characters.clone())
1138 .unwrap_or(Vec::new()),
1139 cx,
1140 )
1141 });
1142 self.buffer_snapshots
1143 .insert(buffer_id, vec![(0, initial_snapshot)]);
1144 }
1145
1146 cx.observe_release(buffer_handle, |this, buffer, cx| {
1147 if let Some(file) = File::from_dyn(buffer.file()) {
1148 if file.is_local() {
1149 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1150 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1151 server
1152 .notify::<lsp::notification::DidCloseTextDocument>(
1153 lsp::DidCloseTextDocumentParams {
1154 text_document: lsp::TextDocumentIdentifier::new(
1155 uri.clone(),
1156 ),
1157 },
1158 )
1159 .log_err();
1160 }
1161 }
1162 }
1163 })
1164 .detach();
1165 }
1166 }
1167 }
1168
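    /// Reacts to buffer events: forwards operations to collaborators via
    /// `UpdateBuffer`, translates edits into incremental
    /// `textDocument/didChange` notifications, and reports saves with
    /// `textDocument/didSave`.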
1169 fn on_buffer_event(
1170 &mut self,
1171 buffer: ModelHandle<Buffer>,
1172 event: &BufferEvent,
1173 cx: &mut ModelContext<Self>,
1174 ) -> Option<()> {
1175 match event {
1176 BufferEvent::Operation(operation) => {
1177 let project_id = self.remote_id()?;
1178 let request = self.client.request(proto::UpdateBuffer {
1179 project_id,
1180 buffer_id: buffer.read(cx).remote_id(),
1181 operations: vec![language::proto::serialize_operation(&operation)],
1182 });
1183 cx.background().spawn(request).detach_and_log_err(cx);
1184 }
1185 BufferEvent::Edited { .. } => {
1186 let (_, language_server) = self
1187 .language_server_for_buffer(buffer.read(cx), cx)?
1188 .clone();
1189 let buffer = buffer.read(cx);
1190 let file = File::from_dyn(buffer.file())?;
1191 let abs_path = file.as_local()?.abs_path(cx);
1192 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1193 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1194 let (version, prev_snapshot) = buffer_snapshots.last()?;
1195 let next_snapshot = buffer.text_snapshot();
1196 let next_version = version + 1;
1197
1198 let content_changes = buffer
1199 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1200 .map(|edit| {
1201 let edit_start = edit.new.start.0;
1202 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1203 let new_text = next_snapshot
1204 .text_for_range(edit.new.start.1..edit.new.end.1)
1205 .collect();
1206 lsp::TextDocumentContentChangeEvent {
1207 range: Some(lsp::Range::new(
1208 point_to_lsp(edit_start),
1209 point_to_lsp(edit_end),
1210 )),
1211 range_length: None,
1212 text: new_text,
1213 }
1214 })
1215 .collect();
1216
1217 buffer_snapshots.push((next_version, next_snapshot));
1218
1219 language_server
1220 .notify::<lsp::notification::DidChangeTextDocument>(
1221 lsp::DidChangeTextDocumentParams {
1222 text_document: lsp::VersionedTextDocumentIdentifier::new(
1223 uri,
1224 next_version,
1225 ),
1226 content_changes,
1227 },
1228 )
1229 .log_err();
1230 }
1231 BufferEvent::Saved => {
1232 let file = File::from_dyn(buffer.read(cx).file())?;
1233 let worktree_id = file.worktree_id(cx);
1234 let abs_path = file.as_local()?.abs_path(cx);
1235 let text_document = lsp::TextDocumentIdentifier {
1236 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1237 };
1238
1239 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1240 server
1241 .notify::<lsp::notification::DidSaveTextDocument>(
1242 lsp::DidSaveTextDocumentParams {
1243 text_document: text_document.clone(),
1244 text: None,
1245 },
1246 )
1247 .log_err();
1248 }
1249 }
1250 _ => {}
1251 }
1252
1253 None
1254 }
1255
1256 fn language_servers_for_worktree(
1257 &self,
1258 worktree_id: WorktreeId,
1259 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1260 self.language_servers.iter().filter_map(
1261 move |((language_server_worktree_id, _), server)| {
1262 if *language_server_worktree_id == worktree_id {
1263 Some(server)
1264 } else {
1265 None
1266 }
1267 },
1268 )
1269 }
1270
1271 fn assign_language_to_buffer(
1272 &mut self,
1273 buffer: &ModelHandle<Buffer>,
1274 cx: &mut ModelContext<Self>,
1275 ) -> Option<()> {
1276 // If the buffer has a language, set it and start the language server if we haven't already.
1277 let full_path = buffer.read(cx).file()?.full_path(cx);
1278 let language = self.languages.select_language(&full_path)?;
1279 buffer.update(cx, |buffer, cx| {
1280 buffer.set_language(Some(language.clone()), cx);
1281 });
1282
1283 let file = File::from_dyn(buffer.read(cx).file())?;
1284 let worktree = file.worktree.read(cx).as_local()?;
1285 let worktree_id = worktree.id();
1286 let worktree_abs_path = worktree.abs_path().clone();
1287 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1288
1289 None
1290 }
1291
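    /// Starts the language server for the given language at most once per
    /// worktree and adapter, registering handlers for published diagnostics,
    /// workspace configuration requests, workspace edits, and progress
    /// notifications, and then informing the server about every matching
    /// buffer that is already open.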
1292 fn start_language_server(
1293 &mut self,
1294 worktree_id: WorktreeId,
1295 worktree_path: Arc<Path>,
1296 language: Arc<Language>,
1297 cx: &mut ModelContext<Self>,
1298 ) {
1299 let adapter = if let Some(adapter) = language.lsp_adapter() {
1300 adapter
1301 } else {
1302 return;
1303 };
1304 let key = (worktree_id, adapter.name());
1305 self.started_language_servers
1306 .entry(key.clone())
1307 .or_insert_with(|| {
1308 let server_id = post_inc(&mut self.next_language_server_id);
1309 let language_server = self.languages.start_language_server(
1310 server_id,
1311 language.clone(),
1312 worktree_path,
1313 self.client.http_client(),
1314 cx,
1315 );
1316 cx.spawn_weak(|this, mut cx| async move {
1317 let language_server = language_server?.await.log_err()?;
1318 let language_server = language_server
1319 .initialize(adapter.initialization_options())
1320 .await
1321 .log_err()?;
1322 let this = this.upgrade(&cx)?;
1323 let disk_based_diagnostics_progress_token =
1324 adapter.disk_based_diagnostics_progress_token();
1325
1326 language_server
1327 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1328 let this = this.downgrade();
1329 let adapter = adapter.clone();
1330 move |params, mut cx| {
1331 if let Some(this) = this.upgrade(&cx) {
1332 this.update(&mut cx, |this, cx| {
1333 this.on_lsp_diagnostics_published(
1334 server_id,
1335 params,
1336 &adapter,
1337 disk_based_diagnostics_progress_token,
1338 cx,
1339 );
1340 });
1341 }
1342 }
1343 })
1344 .detach();
1345
1346 language_server
1347 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1348 let settings = this
1349 .read_with(&cx, |this, _| this.language_server_settings.clone());
1350 move |params, _| {
1351 let settings = settings.lock().clone();
1352 async move {
1353 Ok(params
1354 .items
1355 .into_iter()
1356 .map(|item| {
1357 if let Some(section) = &item.section {
1358 settings
1359 .get(section)
1360 .cloned()
1361 .unwrap_or(serde_json::Value::Null)
1362 } else {
1363 settings.clone()
1364 }
1365 })
1366 .collect())
1367 }
1368 }
1369 })
1370 .detach();
1371
1372 language_server
1373 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1374 let this = this.downgrade();
1375 let adapter = adapter.clone();
1376 let language_server = language_server.clone();
1377 move |params, cx| {
1378 Self::on_lsp_workspace_edit(
1379 this,
1380 params,
1381 server_id,
1382 adapter.clone(),
1383 language_server.clone(),
1384 cx,
1385 )
1386 }
1387 })
1388 .detach();
1389
1390 language_server
1391 .on_notification::<lsp::notification::Progress, _>({
1392 let this = this.downgrade();
1393 move |params, mut cx| {
1394 if let Some(this) = this.upgrade(&cx) {
1395 this.update(&mut cx, |this, cx| {
1396 this.on_lsp_progress(
1397 params,
1398 server_id,
1399 disk_based_diagnostics_progress_token,
1400 cx,
1401 );
1402 });
1403 }
1404 }
1405 })
1406 .detach();
1407
1408 this.update(&mut cx, |this, cx| {
1409 this.language_servers
1410 .insert(key.clone(), (adapter, language_server.clone()));
1411 this.language_server_statuses.insert(
1412 server_id,
1413 LanguageServerStatus {
1414 name: language_server.name().to_string(),
1415 pending_work: Default::default(),
1416 pending_diagnostic_updates: 0,
1417 },
1418 );
1419 language_server
1420 .notify::<lsp::notification::DidChangeConfiguration>(
1421 lsp::DidChangeConfigurationParams {
1422 settings: this.language_server_settings.lock().clone(),
1423 },
1424 )
1425 .ok();
1426
1427 if let Some(project_id) = this.remote_id() {
1428 this.client
1429 .send(proto::StartLanguageServer {
1430 project_id,
1431 server: Some(proto::LanguageServer {
1432 id: server_id as u64,
1433 name: language_server.name().to_string(),
1434 }),
1435 })
1436 .log_err();
1437 }
1438
1439 // Tell the language server about every open buffer in the worktree that matches the language.
1440 for buffer in this.opened_buffers.values() {
1441 if let Some(buffer_handle) = buffer.upgrade(cx) {
1442 let buffer = buffer_handle.read(cx);
1443 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1444 file
1445 } else {
1446 continue;
1447 };
1448 let language = if let Some(language) = buffer.language() {
1449 language
1450 } else {
1451 continue;
1452 };
1453 if file.worktree.read(cx).id() != key.0
1454 || language.lsp_adapter().map(|a| a.name())
1455 != Some(key.1.clone())
1456 {
1457 continue;
1458 }
1459
1460 let file = file.as_local()?;
1461 let versions = this
1462 .buffer_snapshots
1463 .entry(buffer.remote_id())
1464 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1465 let (version, initial_snapshot) = versions.last().unwrap();
1466 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1467 language_server
1468 .notify::<lsp::notification::DidOpenTextDocument>(
1469 lsp::DidOpenTextDocumentParams {
1470 text_document: lsp::TextDocumentItem::new(
1471 uri,
1472 Default::default(),
1473 *version,
1474 initial_snapshot.text(),
1475 ),
1476 },
1477 )
1478 .log_err()?;
1479 buffer_handle.update(cx, |buffer, cx| {
1480 buffer.set_completion_triggers(
1481 language_server
1482 .capabilities()
1483 .completion_provider
1484 .as_ref()
1485 .and_then(|provider| {
1486 provider.trigger_characters.clone()
1487 })
1488 .unwrap_or(Vec::new()),
1489 cx,
1490 )
1491 });
1492 }
1493 }
1494
1495 cx.notify();
1496 Some(())
1497 });
1498
1499 Some(language_server)
1500 })
1501 });
1502 }
1503
1504 pub fn restart_language_servers_for_buffers(
1505 &mut self,
1506 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1507 cx: &mut ModelContext<Self>,
1508 ) -> Option<()> {
1509 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1510 .into_iter()
1511 .filter_map(|buffer| {
1512 let file = File::from_dyn(buffer.read(cx).file())?;
1513 let worktree = file.worktree.read(cx).as_local()?;
1514 let worktree_id = worktree.id();
1515 let worktree_abs_path = worktree.abs_path().clone();
1516 let full_path = file.full_path(cx);
1517 Some((worktree_id, worktree_abs_path, full_path))
1518 })
1519 .collect();
1520 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1521 let language = self.languages.select_language(&full_path)?;
1522 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1523 }
1524
1525 None
1526 }
1527
1528 fn restart_language_server(
1529 &mut self,
1530 worktree_id: WorktreeId,
1531 worktree_path: Arc<Path>,
1532 language: Arc<Language>,
1533 cx: &mut ModelContext<Self>,
1534 ) {
1535 let adapter = if let Some(adapter) = language.lsp_adapter() {
1536 adapter
1537 } else {
1538 return;
1539 };
1540 let key = (worktree_id, adapter.name());
1541 let server_to_shutdown = self.language_servers.remove(&key);
1542 self.started_language_servers.remove(&key);
1543 server_to_shutdown
1544 .as_ref()
1545 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1546 cx.spawn_weak(|this, mut cx| async move {
1547 if let Some(this) = this.upgrade(&cx) {
1548 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1549 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1550 shutdown_task.await;
1551 }
1552 }
1553
1554 this.update(&mut cx, |this, cx| {
1555 this.start_language_server(worktree_id, worktree_path, language, cx);
1556 });
1557 }
1558 })
1559 .detach();
1560 }
1561
1562 fn on_lsp_diagnostics_published(
1563 &mut self,
1564 server_id: usize,
1565 mut params: lsp::PublishDiagnosticsParams,
1566 adapter: &Arc<dyn LspAdapter>,
1567 disk_based_diagnostics_progress_token: Option<&str>,
1568 cx: &mut ModelContext<Self>,
1569 ) {
1570 adapter.process_diagnostics(&mut params);
1571 if disk_based_diagnostics_progress_token.is_none() {
1572 self.disk_based_diagnostics_started(cx);
1573 self.broadcast_language_server_update(
1574 server_id,
1575 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1576 proto::LspDiskBasedDiagnosticsUpdating {},
1577 ),
1578 );
1579 }
1580 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1581 .log_err();
1582 if disk_based_diagnostics_progress_token.is_none() {
1583 self.disk_based_diagnostics_finished(cx);
1584 self.broadcast_language_server_update(
1585 server_id,
1586 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1587 proto::LspDiskBasedDiagnosticsUpdated {},
1588 ),
1589 );
1590 }
1591 }
1592
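    /// Handles `$/progress` notifications. Work tied to the adapter's
    /// disk-based-diagnostics token toggles the project-wide "diagnostics
    /// updating" state, while all other tokens are surfaced as per-server
    /// pending work and broadcast to collaborators.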
1593 fn on_lsp_progress(
1594 &mut self,
1595 progress: lsp::ProgressParams,
1596 server_id: usize,
1597 disk_based_diagnostics_progress_token: Option<&str>,
1598 cx: &mut ModelContext<Self>,
1599 ) {
1600 let token = match progress.token {
1601 lsp::NumberOrString::String(token) => token,
1602 lsp::NumberOrString::Number(token) => {
1603 log::info!("skipping numeric progress token {}", token);
1604 return;
1605 }
1606 };
1607
1608 match progress.value {
1609 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1610 lsp::WorkDoneProgress::Begin(_) => {
1611 let language_server_status =
1612 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1613 status
1614 } else {
1615 return;
1616 };
1617
1618 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1619 language_server_status.pending_diagnostic_updates += 1;
1620 if language_server_status.pending_diagnostic_updates == 1 {
1621 self.disk_based_diagnostics_started(cx);
1622 self.broadcast_language_server_update(
1623 server_id,
1624 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1625 proto::LspDiskBasedDiagnosticsUpdating {},
1626 ),
1627 );
1628 }
1629 } else {
1630 self.on_lsp_work_start(server_id, token.clone(), cx);
1631 self.broadcast_language_server_update(
1632 server_id,
1633 proto::update_language_server::Variant::WorkStart(
1634 proto::LspWorkStart { token },
1635 ),
1636 );
1637 }
1638 }
1639 lsp::WorkDoneProgress::Report(report) => {
1640 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1641 self.on_lsp_work_progress(
1642 server_id,
1643 token.clone(),
1644 LanguageServerProgress {
1645 message: report.message.clone(),
1646 percentage: report.percentage.map(|p| p as usize),
1647 last_update_at: Instant::now(),
1648 },
1649 cx,
1650 );
1651 self.broadcast_language_server_update(
1652 server_id,
1653 proto::update_language_server::Variant::WorkProgress(
1654 proto::LspWorkProgress {
1655 token,
1656 message: report.message,
1657 percentage: report.percentage.map(|p| p as u32),
1658 },
1659 ),
1660 );
1661 }
1662 }
1663 lsp::WorkDoneProgress::End(_) => {
1664 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1665 let language_server_status = if let Some(status) =
1666 self.language_server_statuses.get_mut(&server_id)
1667 {
1668 status
1669 } else {
1670 return;
1671 };
1672
1673 language_server_status.pending_diagnostic_updates -= 1;
1674 if language_server_status.pending_diagnostic_updates == 0 {
1675 self.disk_based_diagnostics_finished(cx);
1676 self.broadcast_language_server_update(
1677 server_id,
1678 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1679 proto::LspDiskBasedDiagnosticsUpdated {},
1680 ),
1681 );
1682 }
1683 } else {
1684 self.on_lsp_work_end(server_id, token.clone(), cx);
1685 self.broadcast_language_server_update(
1686 server_id,
1687 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1688 token,
1689 }),
1690 );
1691 }
1692 }
1693 },
1694 }
1695 }
1696
1697 fn on_lsp_work_start(
1698 &mut self,
1699 language_server_id: usize,
1700 token: String,
1701 cx: &mut ModelContext<Self>,
1702 ) {
1703 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1704 status.pending_work.insert(
1705 token,
1706 LanguageServerProgress {
1707 message: None,
1708 percentage: None,
1709 last_update_at: Instant::now(),
1710 },
1711 );
1712 cx.notify();
1713 }
1714 }
1715
1716 fn on_lsp_work_progress(
1717 &mut self,
1718 language_server_id: usize,
1719 token: String,
1720 progress: LanguageServerProgress,
1721 cx: &mut ModelContext<Self>,
1722 ) {
1723 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1724 status.pending_work.insert(token, progress);
1725 cx.notify();
1726 }
1727 }
1728
1729 fn on_lsp_work_end(
1730 &mut self,
1731 language_server_id: usize,
1732 token: String,
1733 cx: &mut ModelContext<Self>,
1734 ) {
1735 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1736 status.pending_work.remove(&token);
1737 cx.notify();
1738 }
1739 }
1740
1741 async fn on_lsp_workspace_edit(
1742 this: WeakModelHandle<Self>,
1743 params: lsp::ApplyWorkspaceEditParams,
1744 server_id: usize,
1745 adapter: Arc<dyn LspAdapter>,
1746 language_server: Arc<LanguageServer>,
1747 mut cx: AsyncAppContext,
1748 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1749 let this = this
1750 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1752 let transaction = Self::deserialize_workspace_edit(
1753 this.clone(),
1754 params.edit,
1755 true,
1756 adapter.clone(),
1757 language_server.clone(),
1758 &mut cx,
1759 )
1760 .await
1761 .log_err();
1762 this.update(&mut cx, |this, _| {
1763 if let Some(transaction) = transaction {
1764 this.last_workspace_edits_by_language_server
1765 .insert(server_id, transaction);
1766 }
1767 });
1768 Ok(lsp::ApplyWorkspaceEditResponse {
1769 applied: true,
1770 failed_change: None,
1771 failure_reason: None,
1772 })
1773 }
1774
1775 fn broadcast_language_server_update(
1776 &self,
1777 language_server_id: usize,
1778 event: proto::update_language_server::Variant,
1779 ) {
1780 if let Some(project_id) = self.remote_id() {
1781 self.client
1782 .send(proto::UpdateLanguageServer {
1783 project_id,
1784 language_server_id: language_server_id as u64,
1785 variant: Some(event),
1786 })
1787 .log_err();
1788 }
1789 }
1790
1791 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1792 for (_, server) in self.language_servers.values() {
1793 server
1794 .notify::<lsp::notification::DidChangeConfiguration>(
1795 lsp::DidChangeConfigurationParams {
1796 settings: settings.clone(),
1797 },
1798 )
1799 .ok();
1800 }
1801 *self.language_server_settings.lock() = settings;
1802 }
1803
1804 pub fn language_server_statuses(
1805 &self,
1806 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1807 self.language_server_statuses.values()
1808 }
1809
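    /// Converts an LSP `publishDiagnostics` notification into diagnostic
    /// entries: each primary diagnostic gets a fresh group id, related
    /// information in the same file joins that group as a non-primary entry,
    /// and diagnostics whose related information points at an already-recorded
    /// primary are treated as supporting entries that adjust the severity and
    /// "unnecessary" flags of the entries they refer to.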
1810 pub fn update_diagnostics(
1811 &mut self,
1812 params: lsp::PublishDiagnosticsParams,
1813 disk_based_sources: &[&str],
1814 cx: &mut ModelContext<Self>,
1815 ) -> Result<()> {
1816 let abs_path = params
1817 .uri
1818 .to_file_path()
1819 .map_err(|_| anyhow!("URI is not a file"))?;
1820 let mut next_group_id = 0;
1821 let mut diagnostics = Vec::default();
1822 let mut primary_diagnostic_group_ids = HashMap::default();
1823 let mut sources_by_group_id = HashMap::default();
1824 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1826 let source = diagnostic.source.as_ref();
1827 let code = diagnostic.code.as_ref().map(|code| match code {
1828 lsp::NumberOrString::Number(code) => code.to_string(),
1829 lsp::NumberOrString::String(code) => code.clone(),
1830 });
1831 let range = range_from_lsp(diagnostic.range);
1832 let is_supporting = diagnostic
1833 .related_information
1834 .as_ref()
1835 .map_or(false, |infos| {
1836 infos.iter().any(|info| {
1837 primary_diagnostic_group_ids.contains_key(&(
1838 source,
1839 code.clone(),
1840 range_from_lsp(info.location.range),
1841 ))
1842 })
1843 });
1844
1845 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1846 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1847 });
1848
1849 if is_supporting {
1850 supporting_diagnostics.insert(
1851 (source, code.clone(), range),
1852 (diagnostic.severity, is_unnecessary),
1853 );
1854 } else {
1855 let group_id = post_inc(&mut next_group_id);
1856 let is_disk_based = source.map_or(false, |source| {
1857 disk_based_sources.contains(&source.as_str())
1858 });
1859
1860 sources_by_group_id.insert(group_id, source);
1861 primary_diagnostic_group_ids
1862 .insert((source, code.clone(), range.clone()), group_id);
1863
1864 diagnostics.push(DiagnosticEntry {
1865 range,
1866 diagnostic: Diagnostic {
1867 code: code.clone(),
1868 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1869 message: diagnostic.message.clone(),
1870 group_id,
1871 is_primary: true,
1872 is_valid: true,
1873 is_disk_based,
1874 is_unnecessary,
1875 },
1876 });
1877 if let Some(infos) = &diagnostic.related_information {
1878 for info in infos {
1879 if info.location.uri == params.uri && !info.message.is_empty() {
1880 let range = range_from_lsp(info.location.range);
1881 diagnostics.push(DiagnosticEntry {
1882 range,
1883 diagnostic: Diagnostic {
1884 code: code.clone(),
1885 severity: DiagnosticSeverity::INFORMATION,
1886 message: info.message.clone(),
1887 group_id,
1888 is_primary: false,
1889 is_valid: true,
1890 is_disk_based,
1891 is_unnecessary: false,
1892 },
1893 });
1894 }
1895 }
1896 }
1897 }
1898 }
1899
1900 for entry in &mut diagnostics {
1901 let diagnostic = &mut entry.diagnostic;
1902 if !diagnostic.is_primary {
1903 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1904 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1905 source,
1906 diagnostic.code.clone(),
1907 entry.range.clone(),
1908 )) {
1909 if let Some(severity) = severity {
1910 diagnostic.severity = severity;
1911 }
1912 diagnostic.is_unnecessary = is_unnecessary;
1913 }
1914 }
1915 }
1916
1917 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1918 Ok(())
1919 }
1920
1921 pub fn update_diagnostic_entries(
1922 &mut self,
1923 abs_path: PathBuf,
1924 version: Option<i32>,
1925 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1926 cx: &mut ModelContext<Project>,
1927 ) -> Result<(), anyhow::Error> {
1928 let (worktree, relative_path) = self
1929 .find_local_worktree(&abs_path, cx)
1930 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1931 if !worktree.read(cx).is_visible() {
1932 return Ok(());
1933 }
1934
1935 let project_path = ProjectPath {
1936 worktree_id: worktree.read(cx).id(),
1937 path: relative_path.into(),
1938 };
1939
1940 for buffer in self.opened_buffers.values() {
1941 if let Some(buffer) = buffer.upgrade(cx) {
1942 if buffer
1943 .read(cx)
1944 .file()
1945 .map_or(false, |file| *file.path() == project_path.path)
1946 {
1947 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1948 break;
1949 }
1950 }
1951 }
1952 worktree.update(cx, |worktree, cx| {
1953 worktree
1954 .as_local_mut()
1955 .ok_or_else(|| anyhow!("not a local worktree"))?
1956 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1957 })?;
1958 cx.emit(Event::DiagnosticsUpdated(project_path));
1959 Ok(())
1960 }
1961
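/// Installs diagnostics on a buffer: entries are sorted by position, disk-based
/// diagnostics are remapped through the edits made since the last save, ranges are
/// clipped to the buffer, and empty ranges are widened by one character so they
/// remain visible.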
1962 fn update_buffer_diagnostics(
1963 &mut self,
1964 buffer: &ModelHandle<Buffer>,
1965 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1966 version: Option<i32>,
1967 cx: &mut ModelContext<Self>,
1968 ) -> Result<()> {
1969 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1970 Ordering::Equal
1971 .then_with(|| b.is_primary.cmp(&a.is_primary))
1972 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1973 .then_with(|| a.severity.cmp(&b.severity))
1974 .then_with(|| a.message.cmp(&b.message))
1975 }
1976
1977 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1978
1979 diagnostics.sort_unstable_by(|a, b| {
1980 Ordering::Equal
1981 .then_with(|| a.range.start.cmp(&b.range.start))
1982 .then_with(|| b.range.end.cmp(&a.range.end))
1983 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1984 });
1985
1986 let mut sanitized_diagnostics = Vec::new();
1987 let edits_since_save = Patch::new(
1988 snapshot
1989 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1990 .collect(),
1991 );
1992 for entry in diagnostics {
1993 let start;
1994 let end;
1995 if entry.diagnostic.is_disk_based {
1996 // Some diagnostics are based on files on disk instead of buffers'
1997 // current contents. Adjust these diagnostics' ranges to reflect
1998 // any unsaved edits.
1999 start = edits_since_save.old_to_new(entry.range.start);
2000 end = edits_since_save.old_to_new(entry.range.end);
2001 } else {
2002 start = entry.range.start;
2003 end = entry.range.end;
2004 }
2005
2006 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2007 ..snapshot.clip_point_utf16(end, Bias::Right);
2008
2009 // Expand empty ranges by one character
2010 if range.start == range.end {
2011 range.end.column += 1;
2012 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2013 if range.start == range.end && range.end.column > 0 {
2014 range.start.column -= 1;
2015 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2016 }
2017 }
2018
2019 sanitized_diagnostics.push(DiagnosticEntry {
2020 range,
2021 diagnostic: entry.diagnostic,
2022 });
2023 }
2024 drop(edits_since_save);
2025
2026 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2027 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2028 Ok(())
2029 }
2030
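/// Formats the given buffers. Remote buffers are handled by forwarding a `FormatBuffers`
/// request to the host; local buffers are formatted via their language server's document
/// or range formatting request, and the resulting edits are applied as one transaction
/// per buffer.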
2031 pub fn format(
2032 &self,
2033 buffers: HashSet<ModelHandle<Buffer>>,
2034 push_to_history: bool,
2035 cx: &mut ModelContext<Project>,
2036 ) -> Task<Result<ProjectTransaction>> {
2037 let mut local_buffers = Vec::new();
2038 let mut remote_buffers = None;
2039 for buffer_handle in buffers {
2040 let buffer = buffer_handle.read(cx);
2041 if let Some(file) = File::from_dyn(buffer.file()) {
2042 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2043 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2044 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2045 }
2046 } else {
2047 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2048 }
2049 } else {
2050 return Task::ready(Ok(Default::default()));
2051 }
2052 }
2053
2054 let remote_buffers = self.remote_id().zip(remote_buffers);
2055 let client = self.client.clone();
2056
2057 cx.spawn(|this, mut cx| async move {
2058 let mut project_transaction = ProjectTransaction::default();
2059
2060 if let Some((project_id, remote_buffers)) = remote_buffers {
2061 let response = client
2062 .request(proto::FormatBuffers {
2063 project_id,
2064 buffer_ids: remote_buffers
2065 .iter()
2066 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2067 .collect(),
2068 })
2069 .await?
2070 .transaction
2071 .ok_or_else(|| anyhow!("missing transaction"))?;
2072 project_transaction = this
2073 .update(&mut cx, |this, cx| {
2074 this.deserialize_project_transaction(response, push_to_history, cx)
2075 })
2076 .await?;
2077 }
2078
2079 for (buffer, buffer_abs_path, language_server) in local_buffers {
2080 let text_document = lsp::TextDocumentIdentifier::new(
2081 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2082 );
2083 let capabilities = &language_server.capabilities();
2084 let lsp_edits = if capabilities
2085 .document_formatting_provider
2086 .as_ref()
2087 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2088 {
2089 language_server
2090 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2091 text_document,
2092 options: lsp::FormattingOptions {
2093 tab_size: 4,
2094 insert_spaces: true,
2095 insert_final_newline: Some(true),
2096 ..Default::default()
2097 },
2098 work_done_progress_params: Default::default(),
2099 })
2100 .await?
2101 } else if capabilities
2102 .document_range_formatting_provider
2103 .as_ref()
2104 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2105 {
2106 let buffer_start = lsp::Position::new(0, 0);
2107 let buffer_end =
2108 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2109 language_server
2110 .request::<lsp::request::RangeFormatting>(
2111 lsp::DocumentRangeFormattingParams {
2112 text_document,
2113 range: lsp::Range::new(buffer_start, buffer_end),
2114 options: lsp::FormattingOptions {
2115 tab_size: 4,
2116 insert_spaces: true,
2117 insert_final_newline: Some(true),
2118 ..Default::default()
2119 },
2120 work_done_progress_params: Default::default(),
2121 },
2122 )
2123 .await?
2124 } else {
2125 continue;
2126 };
2127
2128 if let Some(lsp_edits) = lsp_edits {
2129 let edits = this
2130 .update(&mut cx, |this, cx| {
2131 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2132 })
2133 .await?;
2134 buffer.update(&mut cx, |buffer, cx| {
2135 buffer.finalize_last_transaction();
2136 buffer.start_transaction();
2137 for (range, text) in edits {
2138 buffer.edit([range], text, cx);
2139 }
2140 if buffer.end_transaction(cx).is_some() {
2141 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2142 if !push_to_history {
2143 buffer.forget_transaction(transaction.id);
2144 }
2145 project_transaction.0.insert(cx.handle(), transaction);
2146 }
2147 });
2148 }
2149 }
2150
2151 Ok(project_transaction)
2152 })
2153 }
2154
2155 pub fn definition<T: ToPointUtf16>(
2156 &self,
2157 buffer: &ModelHandle<Buffer>,
2158 position: T,
2159 cx: &mut ModelContext<Self>,
2160 ) -> Task<Result<Vec<Location>>> {
2161 let position = position.to_point_utf16(buffer.read(cx));
2162 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2163 }
2164
2165 pub fn references<T: ToPointUtf16>(
2166 &self,
2167 buffer: &ModelHandle<Buffer>,
2168 position: T,
2169 cx: &mut ModelContext<Self>,
2170 ) -> Task<Result<Vec<Location>>> {
2171 let position = position.to_point_utf16(buffer.read(cx));
2172 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2173 }
2174
2175 pub fn document_highlights<T: ToPointUtf16>(
2176 &self,
2177 buffer: &ModelHandle<Buffer>,
2178 position: T,
2179 cx: &mut ModelContext<Self>,
2180 ) -> Task<Result<Vec<DocumentHighlight>>> {
2181 let position = position.to_point_utf16(buffer.read(cx));
2182
2183 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2184 }
2185
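/// Queries every running language server for workspace symbols matching `query`,
/// resolving each result to a project path and display label. On remote projects the
/// request is forwarded to the host.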
2186 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2187 if self.is_local() {
2188 let mut language_servers = HashMap::default();
2189 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2190 if let Some(worktree) = self
2191 .worktree_for_id(*worktree_id, cx)
2192 .and_then(|worktree| worktree.read(cx).as_local())
2193 {
2194 language_servers
2195 .entry(Arc::as_ptr(language_server))
2196 .or_insert((
2197 lsp_adapter.clone(),
2198 language_server.clone(),
2199 *worktree_id,
2200 worktree.abs_path().clone(),
2201 ));
2202 }
2203 }
2204
2205 let mut requests = Vec::new();
2206 for (_, language_server, _, _) in language_servers.values() {
2207 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2208 lsp::WorkspaceSymbolParams {
2209 query: query.to_string(),
2210 ..Default::default()
2211 },
2212 ));
2213 }
2214
2215 cx.spawn_weak(|this, cx| async move {
2216 let responses = futures::future::try_join_all(requests).await?;
2217
2218 let mut symbols = Vec::new();
2219 if let Some(this) = this.upgrade(&cx) {
2220 this.read_with(&cx, |this, cx| {
2221 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2222 language_servers.into_values().zip(responses)
2223 {
2224 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2225 |lsp_symbol| {
2226 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2227 let mut worktree_id = source_worktree_id;
2228 let path;
2229 if let Some((worktree, rel_path)) =
2230 this.find_local_worktree(&abs_path, cx)
2231 {
2232 worktree_id = worktree.read(cx).id();
2233 path = rel_path;
2234 } else {
2235 path = relativize_path(&worktree_abs_path, &abs_path);
2236 }
2237
2238 let label = this
2239 .languages
2240 .select_language(&path)
2241 .and_then(|language| {
2242 language
2243 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2244 })
2245 .unwrap_or_else(|| {
2246 CodeLabel::plain(lsp_symbol.name.clone(), None)
2247 });
2248 let signature = this.symbol_signature(worktree_id, &path);
2249
2250 Some(Symbol {
2251 source_worktree_id,
2252 worktree_id,
2253 language_server_name: adapter.name(),
2254 name: lsp_symbol.name,
2255 kind: lsp_symbol.kind,
2256 label,
2257 path,
2258 range: range_from_lsp(lsp_symbol.location.range),
2259 signature,
2260 })
2261 },
2262 ));
2263 }
2264 })
2265 }
2266
2267 Ok(symbols)
2268 })
2269 } else if let Some(project_id) = self.remote_id() {
2270 let request = self.client.request(proto::GetProjectSymbols {
2271 project_id,
2272 query: query.to_string(),
2273 });
2274 cx.spawn_weak(|this, cx| async move {
2275 let response = request.await?;
2276 let mut symbols = Vec::new();
2277 if let Some(this) = this.upgrade(&cx) {
2278 this.read_with(&cx, |this, _| {
2279 symbols.extend(
2280 response
2281 .symbols
2282 .into_iter()
2283 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2284 );
2285 })
2286 }
2287 Ok(symbols)
2288 })
2289 } else {
2290 Task::ready(Ok(Default::default()))
2291 }
2292 }
2293
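/// Opens the buffer containing the given symbol, either through the language server
/// that produced it (locally) or by asking the host for it (remotely).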
2294 pub fn open_buffer_for_symbol(
2295 &mut self,
2296 symbol: &Symbol,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<ModelHandle<Buffer>>> {
2299 if self.is_local() {
2300 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2301 symbol.source_worktree_id,
2302 symbol.language_server_name.clone(),
2303 )) {
2304 server.clone()
2305 } else {
2306 return Task::ready(Err(anyhow!(
2307 "language server for worktree and language not found"
2308 )));
2309 };
2310
2311 let worktree_abs_path = if let Some(worktree_abs_path) = self
2312 .worktree_for_id(symbol.worktree_id, cx)
2313 .and_then(|worktree| worktree.read(cx).as_local())
2314 .map(|local_worktree| local_worktree.abs_path())
2315 {
2316 worktree_abs_path
2317 } else {
2318 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2319 };
2320 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2321 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2322 uri
2323 } else {
2324 return Task::ready(Err(anyhow!("invalid symbol path")));
2325 };
2326
2327 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2328 } else if let Some(project_id) = self.remote_id() {
2329 let request = self.client.request(proto::OpenBufferForSymbol {
2330 project_id,
2331 symbol: Some(serialize_symbol(symbol)),
2332 });
2333 cx.spawn(|this, mut cx| async move {
2334 let response = request.await?;
2335 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2336 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2337 .await
2338 })
2339 } else {
2340 Task::ready(Err(anyhow!("project does not have a remote id")))
2341 }
2342 }
2343
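/// Requests completions at the given position. Locally, the language server's response
/// is converted into anchored `Completion`s, discarding entries whose edit ranges fall
/// outside the buffer; remotely, the host is asked and the buffer waits to catch up to
/// the host's reported version before deserializing the results.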
2344 pub fn completions<T: ToPointUtf16>(
2345 &self,
2346 source_buffer_handle: &ModelHandle<Buffer>,
2347 position: T,
2348 cx: &mut ModelContext<Self>,
2349 ) -> Task<Result<Vec<Completion>>> {
2350 let source_buffer_handle = source_buffer_handle.clone();
2351 let source_buffer = source_buffer_handle.read(cx);
2352 let buffer_id = source_buffer.remote_id();
2353 let language = source_buffer.language().cloned();
2354 let worktree;
2355 let buffer_abs_path;
2356 if let Some(file) = File::from_dyn(source_buffer.file()) {
2357 worktree = file.worktree.clone();
2358 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2359 } else {
2360 return Task::ready(Ok(Default::default()));
2361 };
2362
2363 let position = position.to_point_utf16(source_buffer);
2364 let anchor = source_buffer.anchor_after(position);
2365
2366 if worktree.read(cx).as_local().is_some() {
2367 let buffer_abs_path = buffer_abs_path.unwrap();
2368 let (_, lang_server) =
2369 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2370 server.clone()
2371 } else {
2372 return Task::ready(Ok(Default::default()));
2373 };
2374
2375 cx.spawn(|_, cx| async move {
2376 let clipped_position = source_buffer_handle
2377 .read_with(&cx, |this, _| this.clip_point_utf16(position, Bias::Left));
2378 if clipped_position != position {
2379 log::info!("Completion position out of date");
2380 return Ok(Default::default());
2381 }
2382
2383 let completions = lang_server
2384 .request::<lsp::request::Completion>(lsp::CompletionParams {
2385 text_document_position: lsp::TextDocumentPositionParams::new(
2386 lsp::TextDocumentIdentifier::new(
2387 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2388 ),
2389 point_to_lsp(position),
2390 ),
2391 context: Default::default(),
2392 work_done_progress_params: Default::default(),
2393 partial_result_params: Default::default(),
2394 })
2395 .await
2396 .context("lsp completion request failed")?;
2397
2398 let completions = if let Some(completions) = completions {
2399 match completions {
2400 lsp::CompletionResponse::Array(completions) => completions,
2401 lsp::CompletionResponse::List(list) => list.items,
2402 }
2403 } else {
2404 Default::default()
2405 };
2406
2407 source_buffer_handle.read_with(&cx, |this, _| {
2408 Ok(completions
2409 .into_iter()
2410 .filter_map(|lsp_completion| {
2411 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2412 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2413 (range_from_lsp(edit.range), edit.new_text.clone())
2414 }
2415 None => (
2416 this.common_prefix_at(position, &lsp_completion.label),
2417 lsp_completion.label.clone(),
2418 ),
2419 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2420 log::info!("unsupported insert/replace completion");
2421 return None;
2422 }
2423 };
2424
2425 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2426 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2427 if clipped_start == old_range.start && clipped_end == old_range.end {
2428 Some(Completion {
2429 old_range: this.anchor_before(old_range.start)
2430 ..this.anchor_after(old_range.end),
2431 new_text,
2432 label: language
2433 .as_ref()
2434 .and_then(|l| l.label_for_completion(&lsp_completion))
2435 .unwrap_or_else(|| {
2436 CodeLabel::plain(
2437 lsp_completion.label.clone(),
2438 lsp_completion.filter_text.as_deref(),
2439 )
2440 }),
2441 lsp_completion,
2442 })
2443 } else {
2444 log::info!("completion out of expected range");
2445 None
2446 }
2447 })
2448 .collect())
2449 })
2450 })
2451 } else if let Some(project_id) = self.remote_id() {
2452 let rpc = self.client.clone();
2453 let message = proto::GetCompletions {
2454 project_id,
2455 buffer_id,
2456 position: Some(language::proto::serialize_anchor(&anchor)),
2457 version: serialize_version(&source_buffer.version()),
2458 };
2459 cx.spawn_weak(|_, mut cx| async move {
2460 let response = rpc.request(message).await?;
2461
2462 source_buffer_handle
2463 .update(&mut cx, |buffer, _| {
2464 buffer.wait_for_version(deserialize_version(response.version))
2465 })
2466 .await;
2467
2468 response
2469 .completions
2470 .into_iter()
2471 .map(|completion| {
2472 language::proto::deserialize_completion(completion, language.as_ref())
2473 })
2474 .collect()
2475 })
2476 } else {
2477 Task::ready(Ok(Default::default()))
2478 }
2479 }
2480
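/// Resolves a completion with the language server and applies any
/// `additional_text_edits` it returns in a single transaction, optionally keeping that
/// transaction out of the undo history.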
2481 pub fn apply_additional_edits_for_completion(
2482 &self,
2483 buffer_handle: ModelHandle<Buffer>,
2484 completion: Completion,
2485 push_to_history: bool,
2486 cx: &mut ModelContext<Self>,
2487 ) -> Task<Result<Option<Transaction>>> {
2488 let buffer = buffer_handle.read(cx);
2489 let buffer_id = buffer.remote_id();
2490
2491 if self.is_local() {
2492 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2493 {
2494 server.clone()
2495 } else {
2496 return Task::ready(Ok(Default::default()));
2497 };
2498
2499 cx.spawn(|this, mut cx| async move {
2500 let resolved_completion = lang_server
2501 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2502 .await?;
2503 if let Some(edits) = resolved_completion.additional_text_edits {
2504 let edits = this
2505 .update(&mut cx, |this, cx| {
2506 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2507 })
2508 .await?;
2509 buffer_handle.update(&mut cx, |buffer, cx| {
2510 buffer.finalize_last_transaction();
2511 buffer.start_transaction();
2512 for (range, text) in edits {
2513 buffer.edit([range], text, cx);
2514 }
2515 let transaction = if buffer.end_transaction(cx).is_some() {
2516 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2517 if !push_to_history {
2518 buffer.forget_transaction(transaction.id);
2519 }
2520 Some(transaction)
2521 } else {
2522 None
2523 };
2524 Ok(transaction)
2525 })
2526 } else {
2527 Ok(None)
2528 }
2529 })
2530 } else if let Some(project_id) = self.remote_id() {
2531 let client = self.client.clone();
2532 cx.spawn(|_, mut cx| async move {
2533 let response = client
2534 .request(proto::ApplyCompletionAdditionalEdits {
2535 project_id,
2536 buffer_id,
2537 completion: Some(language::proto::serialize_completion(&completion)),
2538 })
2539 .await?;
2540
2541 if let Some(transaction) = response.transaction {
2542 let transaction = language::proto::deserialize_transaction(transaction)?;
2543 buffer_handle
2544 .update(&mut cx, |buffer, _| {
2545 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2546 })
2547 .await;
2548 if push_to_history {
2549 buffer_handle.update(&mut cx, |buffer, _| {
2550 buffer.push_transaction(transaction.clone(), Instant::now());
2551 });
2552 }
2553 Ok(Some(transaction))
2554 } else {
2555 Ok(None)
2556 }
2557 })
2558 } else {
2559 Task::ready(Err(anyhow!("project does not have a remote id")))
2560 }
2561 }
2562
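/// Fetches code actions for the given range, passing along the diagnostics that overlap
/// it. Only quickfix, refactor, refactor-extract, and source actions are requested.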
2563 pub fn code_actions<T: Clone + ToOffset>(
2564 &self,
2565 buffer_handle: &ModelHandle<Buffer>,
2566 range: Range<T>,
2567 cx: &mut ModelContext<Self>,
2568 ) -> Task<Result<Vec<CodeAction>>> {
2569 let buffer_handle = buffer_handle.clone();
2570 let buffer = buffer_handle.read(cx);
2571 let snapshot = buffer.snapshot();
2572 let relevant_diagnostics = snapshot
2573 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2574 .map(|entry| entry.to_lsp_diagnostic_stub())
2575 .collect();
2576 let buffer_id = buffer.remote_id();
2577 let worktree;
2578 let buffer_abs_path;
2579 if let Some(file) = File::from_dyn(buffer.file()) {
2580 worktree = file.worktree.clone();
2581 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2582 } else {
2583 return Task::ready(Ok(Default::default()));
2584 };
2585 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2586
2587 if worktree.read(cx).as_local().is_some() {
2588 let buffer_abs_path = buffer_abs_path.unwrap();
2589 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2590 {
2591 server.clone()
2592 } else {
2593 return Task::ready(Ok(Default::default()));
2594 };
2595
2596 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2597 cx.foreground().spawn(async move {
2598 if lang_server.capabilities().code_action_provider.is_none() {
2599 return Ok(Default::default());
2600 }
2601
2602 Ok(lang_server
2603 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2604 text_document: lsp::TextDocumentIdentifier::new(
2605 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2606 ),
2607 range: lsp_range,
2608 work_done_progress_params: Default::default(),
2609 partial_result_params: Default::default(),
2610 context: lsp::CodeActionContext {
2611 diagnostics: relevant_diagnostics,
2612 only: Some(vec![
2613 lsp::CodeActionKind::QUICKFIX,
2614 lsp::CodeActionKind::REFACTOR,
2615 lsp::CodeActionKind::REFACTOR_EXTRACT,
2616 lsp::CodeActionKind::SOURCE,
2617 ]),
2618 },
2619 })
2620 .await?
2621 .unwrap_or_default()
2622 .into_iter()
2623 .filter_map(|entry| {
2624 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2625 Some(CodeAction {
2626 range: range.clone(),
2627 lsp_action,
2628 })
2629 } else {
2630 None
2631 }
2632 })
2633 .collect())
2634 })
2635 } else if let Some(project_id) = self.remote_id() {
2636 let rpc = self.client.clone();
2637 let version = buffer.version();
2638 cx.spawn_weak(|_, mut cx| async move {
2639 let response = rpc
2640 .request(proto::GetCodeActions {
2641 project_id,
2642 buffer_id,
2643 start: Some(language::proto::serialize_anchor(&range.start)),
2644 end: Some(language::proto::serialize_anchor(&range.end)),
2645 version: serialize_version(&version),
2646 })
2647 .await?;
2648
2649 buffer_handle
2650 .update(&mut cx, |buffer, _| {
2651 buffer.wait_for_version(deserialize_version(response.version))
2652 })
2653 .await;
2654
2655 response
2656 .actions
2657 .into_iter()
2658 .map(language::proto::deserialize_code_action)
2659 .collect()
2660 })
2661 } else {
2662 Task::ready(Ok(Default::default()))
2663 }
2664 }
2665
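/// Applies a code action: the action is re-resolved (or re-fetched by title if the
/// server doesn't support resolution), then its workspace edit is applied, or its
/// command is executed and the resulting workspace edits are collected from the server.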
2666 pub fn apply_code_action(
2667 &self,
2668 buffer_handle: ModelHandle<Buffer>,
2669 mut action: CodeAction,
2670 push_to_history: bool,
2671 cx: &mut ModelContext<Self>,
2672 ) -> Task<Result<ProjectTransaction>> {
2673 if self.is_local() {
2674 let buffer = buffer_handle.read(cx);
2675 let (lsp_adapter, lang_server) =
2676 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2677 server.clone()
2678 } else {
2679 return Task::ready(Ok(Default::default()));
2680 };
2681 let range = action.range.to_point_utf16(buffer);
2682
2683 cx.spawn(|this, mut cx| async move {
2684 if let Some(lsp_range) = action
2685 .lsp_action
2686 .data
2687 .as_mut()
2688 .and_then(|d| d.get_mut("codeActionParams"))
2689 .and_then(|d| d.get_mut("range"))
2690 {
2691 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2692 action.lsp_action = lang_server
2693 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2694 .await?;
2695 } else {
2696 let actions = this
2697 .update(&mut cx, |this, cx| {
2698 this.code_actions(&buffer_handle, action.range, cx)
2699 })
2700 .await?;
2701 action.lsp_action = actions
2702 .into_iter()
2703 .find(|a| a.lsp_action.title == action.lsp_action.title)
2704 .ok_or_else(|| anyhow!("code action is outdated"))?
2705 .lsp_action;
2706 }
2707
2708 if let Some(edit) = action.lsp_action.edit {
2709 Self::deserialize_workspace_edit(
2710 this,
2711 edit,
2712 push_to_history,
2713 lsp_adapter,
2714 lang_server,
2715 &mut cx,
2716 )
2717 .await
2718 } else if let Some(command) = action.lsp_action.command {
2719 this.update(&mut cx, |this, _| {
2720 this.last_workspace_edits_by_language_server
2721 .remove(&lang_server.server_id());
2722 });
2723 lang_server
2724 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2725 command: command.command,
2726 arguments: command.arguments.unwrap_or_default(),
2727 ..Default::default()
2728 })
2729 .await?;
2730 Ok(this.update(&mut cx, |this, _| {
2731 this.last_workspace_edits_by_language_server
2732 .remove(&lang_server.server_id())
2733 .unwrap_or_default()
2734 }))
2735 } else {
2736 Ok(ProjectTransaction::default())
2737 }
2738 })
2739 } else if let Some(project_id) = self.remote_id() {
2740 let client = self.client.clone();
2741 let request = proto::ApplyCodeAction {
2742 project_id,
2743 buffer_id: buffer_handle.read(cx).remote_id(),
2744 action: Some(language::proto::serialize_code_action(&action)),
2745 };
2746 cx.spawn(|this, mut cx| async move {
2747 let response = client
2748 .request(request)
2749 .await?
2750 .transaction
2751 .ok_or_else(|| anyhow!("missing transaction"))?;
2752 this.update(&mut cx, |this, cx| {
2753 this.deserialize_project_transaction(response, push_to_history, cx)
2754 })
2755 .await
2756 })
2757 } else {
2758 Task::ready(Err(anyhow!("project does not have a remote id")))
2759 }
2760 }
2761
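/// Translates an LSP workspace edit into buffer transactions, performing any file
/// creation, rename, and deletion operations it contains along the way.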
2762 async fn deserialize_workspace_edit(
2763 this: ModelHandle<Self>,
2764 edit: lsp::WorkspaceEdit,
2765 push_to_history: bool,
2766 lsp_adapter: Arc<dyn LspAdapter>,
2767 language_server: Arc<LanguageServer>,
2768 cx: &mut AsyncAppContext,
2769 ) -> Result<ProjectTransaction> {
2770 let fs = this.read_with(cx, |this, _| this.fs.clone());
2771 let mut operations = Vec::new();
2772 if let Some(document_changes) = edit.document_changes {
2773 match document_changes {
2774 lsp::DocumentChanges::Edits(edits) => {
2775 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2776 }
2777 lsp::DocumentChanges::Operations(ops) => operations = ops,
2778 }
2779 } else if let Some(changes) = edit.changes {
2780 operations.extend(changes.into_iter().map(|(uri, edits)| {
2781 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2782 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2783 uri,
2784 version: None,
2785 },
2786 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2787 })
2788 }));
2789 }
2790
2791 let mut project_transaction = ProjectTransaction::default();
2792 for operation in operations {
2793 match operation {
2794 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2795 let abs_path = op
2796 .uri
2797 .to_file_path()
2798 .map_err(|_| anyhow!("can't convert URI to path"))?;
2799
2800 if let Some(parent_path) = abs_path.parent() {
2801 fs.create_dir(parent_path).await?;
2802 }
2803 if op.uri.as_str().ends_with('/') {
2804 fs.create_dir(&abs_path).await?;
2805 } else {
2806 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2807 .await?;
2808 }
2809 }
2810 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2811 let source_abs_path = op
2812 .old_uri
2813 .to_file_path()
2814 .map_err(|_| anyhow!("can't convert URI to path"))?;
2815 let target_abs_path = op
2816 .new_uri
2817 .to_file_path()
2818 .map_err(|_| anyhow!("can't convert URI to path"))?;
2819 fs.rename(
2820 &source_abs_path,
2821 &target_abs_path,
2822 op.options.map(Into::into).unwrap_or_default(),
2823 )
2824 .await?;
2825 }
2826 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2827 let abs_path = op
2828 .uri
2829 .to_file_path()
2830 .map_err(|_| anyhow!("can't convert URI to path"))?;
2831 let options = op.options.map(Into::into).unwrap_or_default();
2832 if op.uri.as_str().ends_with('/') {
2833 fs.remove_dir(&abs_path, options).await?;
2834 } else {
2835 fs.remove_file(&abs_path, options).await?;
2836 }
2837 }
2838 lsp::DocumentChangeOperation::Edit(op) => {
2839 let buffer_to_edit = this
2840 .update(cx, |this, cx| {
2841 this.open_local_buffer_via_lsp(
2842 op.text_document.uri,
2843 lsp_adapter.clone(),
2844 language_server.clone(),
2845 cx,
2846 )
2847 })
2848 .await?;
2849
2850 let edits = this
2851 .update(cx, |this, cx| {
2852 let edits = op.edits.into_iter().map(|edit| match edit {
2853 lsp::OneOf::Left(edit) => edit,
2854 lsp::OneOf::Right(edit) => edit.text_edit,
2855 });
2856 this.edits_from_lsp(
2857 &buffer_to_edit,
2858 edits,
2859 op.text_document.version,
2860 cx,
2861 )
2862 })
2863 .await?;
2864
2865 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2866 buffer.finalize_last_transaction();
2867 buffer.start_transaction();
2868 for (range, text) in edits {
2869 buffer.edit([range], text, cx);
2870 }
2871 let transaction = if buffer.end_transaction(cx).is_some() {
2872 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2873 if !push_to_history {
2874 buffer.forget_transaction(transaction.id);
2875 }
2876 Some(transaction)
2877 } else {
2878 None
2879 };
2880
2881 transaction
2882 });
2883 if let Some(transaction) = transaction {
2884 project_transaction.0.insert(buffer_to_edit, transaction);
2885 }
2886 }
2887 }
2888 }
2889
2890 Ok(project_transaction)
2891 }
2892
2893 pub fn prepare_rename<T: ToPointUtf16>(
2894 &self,
2895 buffer: ModelHandle<Buffer>,
2896 position: T,
2897 cx: &mut ModelContext<Self>,
2898 ) -> Task<Result<Option<Range<Anchor>>>> {
2899 let position = position.to_point_utf16(buffer.read(cx));
2900 self.request_lsp(buffer, PrepareRename { position }, cx)
2901 }
2902
2903 pub fn perform_rename<T: ToPointUtf16>(
2904 &self,
2905 buffer: ModelHandle<Buffer>,
2906 position: T,
2907 new_name: String,
2908 push_to_history: bool,
2909 cx: &mut ModelContext<Self>,
2910 ) -> Task<Result<ProjectTransaction>> {
2911 let position = position.to_point_utf16(buffer.read(cx));
2912 self.request_lsp(
2913 buffer,
2914 PerformRename {
2915 position,
2916 new_name,
2917 push_to_history,
2918 },
2919 cx,
2920 )
2921 }
2922
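/// Searches the project. Worktree files are scanned for candidate paths by a pool of
/// background workers, candidates are opened (or reused if already open), and each open
/// buffer is then searched for match ranges. Remote projects forward the query to the
/// host.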
2923 pub fn search(
2924 &self,
2925 query: SearchQuery,
2926 cx: &mut ModelContext<Self>,
2927 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2928 if self.is_local() {
2929 let snapshots = self
2930 .visible_worktrees(cx)
2931 .filter_map(|tree| {
2932 let tree = tree.read(cx).as_local()?;
2933 Some(tree.snapshot())
2934 })
2935 .collect::<Vec<_>>();
2936
2937 let background = cx.background().clone();
2938 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2939 if path_count == 0 {
2940 return Task::ready(Ok(Default::default()));
2941 }
2942 let workers = background.num_cpus().min(path_count);
2943 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2944 cx.background()
2945 .spawn({
2946 let fs = self.fs.clone();
2947 let background = cx.background().clone();
2948 let query = query.clone();
2949 async move {
2950 let fs = &fs;
2951 let query = &query;
2952 let matching_paths_tx = &matching_paths_tx;
2953 let paths_per_worker = (path_count + workers - 1) / workers;
2954 let snapshots = &snapshots;
2955 background
2956 .scoped(|scope| {
2957 for worker_ix in 0..workers {
2958 let worker_start_ix = worker_ix * paths_per_worker;
2959 let worker_end_ix = worker_start_ix + paths_per_worker;
2960 scope.spawn(async move {
2961 let mut snapshot_start_ix = 0;
2962 let mut abs_path = PathBuf::new();
2963 for snapshot in snapshots {
2964 let snapshot_end_ix =
2965 snapshot_start_ix + snapshot.visible_file_count();
2966 if worker_end_ix <= snapshot_start_ix {
2967 break;
2968 } else if worker_start_ix > snapshot_end_ix {
2969 snapshot_start_ix = snapshot_end_ix;
2970 continue;
2971 } else {
2972 let start_in_snapshot = worker_start_ix
2973 .saturating_sub(snapshot_start_ix);
2974 let end_in_snapshot =
2975 cmp::min(worker_end_ix, snapshot_end_ix)
2976 - snapshot_start_ix;
2977
2978 for entry in snapshot
2979 .files(false, start_in_snapshot)
2980 .take(end_in_snapshot - start_in_snapshot)
2981 {
2982 if matching_paths_tx.is_closed() {
2983 break;
2984 }
2985
2986 abs_path.clear();
2987 abs_path.push(&snapshot.abs_path());
2988 abs_path.push(&entry.path);
2989 let matches = if let Some(file) =
2990 fs.open_sync(&abs_path).await.log_err()
2991 {
2992 query.detect(file).unwrap_or(false)
2993 } else {
2994 false
2995 };
2996
2997 if matches {
2998 let project_path =
2999 (snapshot.id(), entry.path.clone());
3000 if matching_paths_tx
3001 .send(project_path)
3002 .await
3003 .is_err()
3004 {
3005 break;
3006 }
3007 }
3008 }
3009
3010 snapshot_start_ix = snapshot_end_ix;
3011 }
3012 }
3013 });
3014 }
3015 })
3016 .await;
3017 }
3018 })
3019 .detach();
3020
3021 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3022 let open_buffers = self
3023 .opened_buffers
3024 .values()
3025 .filter_map(|b| b.upgrade(cx))
3026 .collect::<HashSet<_>>();
3027 cx.spawn(|this, cx| async move {
3028 for buffer in &open_buffers {
3029 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3030 buffers_tx.send((buffer.clone(), snapshot)).await?;
3031 }
3032
3033 let open_buffers = Rc::new(RefCell::new(open_buffers));
3034 while let Some(project_path) = matching_paths_rx.next().await {
3035 if buffers_tx.is_closed() {
3036 break;
3037 }
3038
3039 let this = this.clone();
3040 let open_buffers = open_buffers.clone();
3041 let buffers_tx = buffers_tx.clone();
3042 cx.spawn(|mut cx| async move {
3043 if let Some(buffer) = this
3044 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3045 .await
3046 .log_err()
3047 {
3048 if open_buffers.borrow_mut().insert(buffer.clone()) {
3049 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3050 buffers_tx.send((buffer, snapshot)).await?;
3051 }
3052 }
3053
3054 Ok::<_, anyhow::Error>(())
3055 })
3056 .detach();
3057 }
3058
3059 Ok::<_, anyhow::Error>(())
3060 })
3061 .detach_and_log_err(cx);
3062
3063 let background = cx.background().clone();
3064 cx.background().spawn(async move {
3065 let query = &query;
3066 let mut matched_buffers = Vec::new();
3067 for _ in 0..workers {
3068 matched_buffers.push(HashMap::default());
3069 }
3070 background
3071 .scoped(|scope| {
3072 for worker_matched_buffers in matched_buffers.iter_mut() {
3073 let mut buffers_rx = buffers_rx.clone();
3074 scope.spawn(async move {
3075 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3076 let buffer_matches = query
3077 .search(snapshot.as_rope())
3078 .await
3079 .iter()
3080 .map(|range| {
3081 snapshot.anchor_before(range.start)
3082 ..snapshot.anchor_after(range.end)
3083 })
3084 .collect::<Vec<_>>();
3085 if !buffer_matches.is_empty() {
3086 worker_matched_buffers
3087 .insert(buffer.clone(), buffer_matches);
3088 }
3089 }
3090 });
3091 }
3092 })
3093 .await;
3094 Ok(matched_buffers.into_iter().flatten().collect())
3095 })
3096 } else if let Some(project_id) = self.remote_id() {
3097 let request = self.client.request(query.to_proto(project_id));
3098 cx.spawn(|this, mut cx| async move {
3099 let response = request.await?;
3100 let mut result = HashMap::default();
3101 for location in response.locations {
3102 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3103 let target_buffer = this
3104 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3105 .await?;
3106 let start = location
3107 .start
3108 .and_then(deserialize_anchor)
3109 .ok_or_else(|| anyhow!("missing target start"))?;
3110 let end = location
3111 .end
3112 .and_then(deserialize_anchor)
3113 .ok_or_else(|| anyhow!("missing target end"))?;
3114 result
3115 .entry(target_buffer)
3116 .or_insert_with(Vec::new)
3117 .push(start..end);
3118 }
3119 Ok(result)
3120 })
3121 } else {
3122 Task::ready(Ok(Default::default()))
3123 }
3124 }
3125
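/// Dispatches an `LspCommand` either to the buffer's local language server or, for
/// remote projects, to the host over RPC, returning a default response if neither is
/// available or the server lacks the required capability.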
3126 fn request_lsp<R: LspCommand>(
3127 &self,
3128 buffer_handle: ModelHandle<Buffer>,
3129 request: R,
3130 cx: &mut ModelContext<Self>,
3131 ) -> Task<Result<R::Response>>
3132 where
3133 <R::LspRequest as lsp::request::Request>::Result: Send,
3134 {
3135 let buffer = buffer_handle.read(cx);
3136 if self.is_local() {
3137 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3138 if let Some((file, (_, language_server))) =
3139 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3140 {
3141 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3142 return cx.spawn(|this, cx| async move {
3143 if !request.check_capabilities(&language_server.capabilities()) {
3144 return Ok(Default::default());
3145 }
3146
3147 let response = language_server
3148 .request::<R::LspRequest>(lsp_params)
3149 .await
3150 .context("lsp request failed")?;
3151 request
3152 .response_from_lsp(response, this, buffer_handle, cx)
3153 .await
3154 });
3155 }
3156 } else if let Some(project_id) = self.remote_id() {
3157 let rpc = self.client.clone();
3158 let message = request.to_proto(project_id, buffer);
3159 return cx.spawn(|this, cx| async move {
3160 let response = rpc.request(message).await?;
3161 request
3162 .response_from_proto(response, this, buffer_handle, cx)
3163 .await
3164 });
3165 }
3166 Task::ready(Ok(Default::default()))
3167 }
3168
3169 pub fn find_or_create_local_worktree(
3170 &mut self,
3171 abs_path: impl AsRef<Path>,
3172 visible: bool,
3173 cx: &mut ModelContext<Self>,
3174 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3175 let abs_path = abs_path.as_ref();
3176 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3177 Task::ready(Ok((tree.clone(), relative_path.into())))
3178 } else {
3179 let worktree = self.create_local_worktree(abs_path, visible, cx);
3180 cx.foreground()
3181 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3182 }
3183 }
3184
3185 pub fn find_local_worktree(
3186 &self,
3187 abs_path: &Path,
3188 cx: &AppContext,
3189 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3190 for tree in self.worktrees(cx) {
3191 if let Some(relative_path) = tree
3192 .read(cx)
3193 .as_local()
3194 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3195 {
3196 return Some((tree.clone(), relative_path.into()));
3197 }
3198 }
3199 None
3200 }
3201
3202 pub fn is_shared(&self) -> bool {
3203 match &self.client_state {
3204 ProjectClientState::Local { is_shared, .. } => *is_shared,
3205 ProjectClientState::Remote { .. } => false,
3206 }
3207 }
3208
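/// Creates (or joins an in-flight load of) a local worktree for `abs_path`, then
/// registers or shares it with the server if the project has a remote id.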
3209 fn create_local_worktree(
3210 &mut self,
3211 abs_path: impl AsRef<Path>,
3212 visible: bool,
3213 cx: &mut ModelContext<Self>,
3214 ) -> Task<Result<ModelHandle<Worktree>>> {
3215 let fs = self.fs.clone();
3216 let client = self.client.clone();
3217 let next_entry_id = self.next_entry_id.clone();
3218 let path: Arc<Path> = abs_path.as_ref().into();
3219 let task = self
3220 .loading_local_worktrees
3221 .entry(path.clone())
3222 .or_insert_with(|| {
3223 cx.spawn(|project, mut cx| {
3224 async move {
3225 let worktree = Worktree::local(
3226 client.clone(),
3227 path.clone(),
3228 visible,
3229 fs,
3230 next_entry_id,
3231 &mut cx,
3232 )
3233 .await;
3234 project.update(&mut cx, |project, _| {
3235 project.loading_local_worktrees.remove(&path);
3236 });
3237 let worktree = worktree?;
3238
3239 let (remote_project_id, is_shared) =
3240 project.update(&mut cx, |project, cx| {
3241 project.add_worktree(&worktree, cx);
3242 (project.remote_id(), project.is_shared())
3243 });
3244
3245 if let Some(project_id) = remote_project_id {
3246 if is_shared {
3247 worktree
3248 .update(&mut cx, |worktree, cx| {
3249 worktree.as_local_mut().unwrap().share(project_id, cx)
3250 })
3251 .await?;
3252 } else {
3253 worktree
3254 .update(&mut cx, |worktree, cx| {
3255 worktree.as_local_mut().unwrap().register(project_id, cx)
3256 })
3257 .await?;
3258 }
3259 }
3260
3261 Ok(worktree)
3262 }
3263 .map_err(Arc::new)
3264 })
3265 .shared()
3266 })
3267 .clone();
3268 cx.foreground().spawn(async move {
3269 match task.await {
3270 Ok(worktree) => Ok(worktree),
3271 Err(err) => Err(anyhow!("{}", err)),
3272 }
3273 })
3274 }
3275
3276 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3277 self.worktrees.retain(|worktree| {
3278 worktree
3279 .upgrade(cx)
3280 .map_or(false, |w| w.read(cx).id() != id)
3281 });
3282 cx.notify();
3283 }
3284
3285 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3286 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3287 if worktree.read(cx).is_local() {
3288 cx.subscribe(&worktree, |this, worktree, _, cx| {
3289 this.update_local_worktree_buffers(worktree, cx);
3290 })
3291 .detach();
3292 }
3293
3294 let push_strong_handle = {
3295 let worktree = worktree.read(cx);
3296 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3297 };
3298 if push_strong_handle {
3299 self.worktrees
3300 .push(WorktreeHandle::Strong(worktree.clone()));
3301 } else {
3302 cx.observe_release(&worktree, |this, _, cx| {
3303 this.worktrees
3304 .retain(|worktree| worktree.upgrade(cx).is_some());
3305 cx.notify();
3306 })
3307 .detach();
3308 self.worktrees
3309 .push(WorktreeHandle::Weak(worktree.downgrade()));
3310 }
3311 cx.notify();
3312 }
3313
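/// Re-associates open buffers with their entries after a worktree update, notifying
/// collaborators of the new file metadata and dropping buffers whose handles have been
/// released.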
3314 fn update_local_worktree_buffers(
3315 &mut self,
3316 worktree_handle: ModelHandle<Worktree>,
3317 cx: &mut ModelContext<Self>,
3318 ) {
3319 let snapshot = worktree_handle.read(cx).snapshot();
3320 let mut buffers_to_delete = Vec::new();
3321 for (buffer_id, buffer) in &self.opened_buffers {
3322 if let Some(buffer) = buffer.upgrade(cx) {
3323 buffer.update(cx, |buffer, cx| {
3324 if let Some(old_file) = File::from_dyn(buffer.file()) {
3325 if old_file.worktree != worktree_handle {
3326 return;
3327 }
3328
3329 let new_file = if let Some(entry) = old_file
3330 .entry_id
3331 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3332 {
3333 File {
3334 is_local: true,
3335 entry_id: Some(entry.id),
3336 mtime: entry.mtime,
3337 path: entry.path.clone(),
3338 worktree: worktree_handle.clone(),
3339 }
3340 } else if let Some(entry) =
3341 snapshot.entry_for_path(old_file.path().as_ref())
3342 {
3343 File {
3344 is_local: true,
3345 entry_id: Some(entry.id),
3346 mtime: entry.mtime,
3347 path: entry.path.clone(),
3348 worktree: worktree_handle.clone(),
3349 }
3350 } else {
3351 File {
3352 is_local: true,
3353 entry_id: None,
3354 path: old_file.path().clone(),
3355 mtime: old_file.mtime(),
3356 worktree: worktree_handle.clone(),
3357 }
3358 };
3359
3360 if let Some(project_id) = self.remote_id() {
3361 self.client
3362 .send(proto::UpdateBufferFile {
3363 project_id,
3364 buffer_id: *buffer_id as u64,
3365 file: Some(new_file.to_proto()),
3366 })
3367 .log_err();
3368 }
3369 buffer.file_updated(Box::new(new_file), cx).detach();
3370 }
3371 });
3372 } else {
3373 buffers_to_delete.push(*buffer_id);
3374 }
3375 }
3376
3377 for buffer_id in buffers_to_delete {
3378 self.opened_buffers.remove(&buffer_id);
3379 }
3380 }
3381
3382 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3383 let new_active_entry = entry.and_then(|project_path| {
3384 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3385 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3386 Some(entry.id)
3387 });
3388 if new_active_entry != self.active_entry {
3389 self.active_entry = new_active_entry;
3390 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3391 }
3392 }
3393
3394 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3395 self.language_servers_with_diagnostics_running > 0
3396 }
3397
3398 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3399 let mut summary = DiagnosticSummary::default();
3400 for (_, path_summary) in self.diagnostic_summaries(cx) {
3401 summary.error_count += path_summary.error_count;
3402 summary.warning_count += path_summary.warning_count;
3403 summary.info_count += path_summary.info_count;
3404 summary.hint_count += path_summary.hint_count;
3405 }
3406 summary
3407 }
3408
3409 pub fn diagnostic_summaries<'a>(
3410 &'a self,
3411 cx: &'a AppContext,
3412 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3413 self.worktrees(cx).flat_map(move |worktree| {
3414 let worktree = worktree.read(cx);
3415 let worktree_id = worktree.id();
3416 worktree
3417 .diagnostic_summaries()
3418 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3419 })
3420 }
3421
3422 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3423 self.language_servers_with_diagnostics_running += 1;
3424 if self.language_servers_with_diagnostics_running == 1 {
3425 cx.emit(Event::DiskBasedDiagnosticsStarted);
3426 }
3427 }
3428
3429 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3430 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3431 self.language_servers_with_diagnostics_running -= 1;
3432 if self.language_servers_with_diagnostics_running == 0 {
3433 cx.emit(Event::DiskBasedDiagnosticsFinished);
3434 }
3435 }
3436
3437 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3438 self.active_entry
3439 }
3440
3441 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3442 self.worktree_for_id(path.worktree_id, cx)?
3443 .read(cx)
3444 .entry_for_path(&path.path)
3445 .map(|entry| entry.id)
3446 }
3447
3448 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3449 let worktree = self.worktree_for_entry(entry_id, cx)?;
3450 let worktree = worktree.read(cx);
3451 let worktree_id = worktree.id();
3452 let path = worktree.entry_for_id(entry_id)?.path.clone();
3453 Some(ProjectPath { worktree_id, path })
3454 }
3455
3456 // RPC message handlers
3457
3458 async fn handle_unshare_project(
3459 this: ModelHandle<Self>,
3460 _: TypedEnvelope<proto::UnshareProject>,
3461 _: Arc<Client>,
3462 mut cx: AsyncAppContext,
3463 ) -> Result<()> {
3464 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3465 Ok(())
3466 }
3467
3468 async fn handle_add_collaborator(
3469 this: ModelHandle<Self>,
3470 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3471 _: Arc<Client>,
3472 mut cx: AsyncAppContext,
3473 ) -> Result<()> {
3474 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3475 let collaborator = envelope
3476 .payload
3477 .collaborator
3478 .take()
3479 .ok_or_else(|| anyhow!("empty collaborator"))?;
3480
3481 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3482 this.update(&mut cx, |this, cx| {
3483 this.collaborators
3484 .insert(collaborator.peer_id, collaborator);
3485 cx.notify();
3486 });
3487
3488 Ok(())
3489 }
3490
3491 async fn handle_remove_collaborator(
3492 this: ModelHandle<Self>,
3493 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3494 _: Arc<Client>,
3495 mut cx: AsyncAppContext,
3496 ) -> Result<()> {
3497 this.update(&mut cx, |this, cx| {
3498 let peer_id = PeerId(envelope.payload.peer_id);
3499 let replica_id = this
3500 .collaborators
3501 .remove(&peer_id)
3502 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3503 .replica_id;
3504 for (_, buffer) in &this.opened_buffers {
3505 if let Some(buffer) = buffer.upgrade(cx) {
3506 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3507 }
3508 }
3509 cx.emit(Event::CollaboratorLeft(peer_id));
3510 cx.notify();
3511 Ok(())
3512 })
3513 }
3514
3515 async fn handle_register_worktree(
3516 this: ModelHandle<Self>,
3517 envelope: TypedEnvelope<proto::RegisterWorktree>,
3518 client: Arc<Client>,
3519 mut cx: AsyncAppContext,
3520 ) -> Result<()> {
3521 this.update(&mut cx, |this, cx| {
3522 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3523 let replica_id = this.replica_id();
3524 let worktree = proto::Worktree {
3525 id: envelope.payload.worktree_id,
3526 root_name: envelope.payload.root_name,
3527 entries: Default::default(),
3528 diagnostic_summaries: Default::default(),
3529 visible: envelope.payload.visible,
3530 };
3531 let (worktree, load_task) =
3532 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3533 this.add_worktree(&worktree, cx);
3534 load_task.detach();
3535 Ok(())
3536 })
3537 }
3538
3539 async fn handle_unregister_worktree(
3540 this: ModelHandle<Self>,
3541 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3542 _: Arc<Client>,
3543 mut cx: AsyncAppContext,
3544 ) -> Result<()> {
3545 this.update(&mut cx, |this, cx| {
3546 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3547 this.remove_worktree(worktree_id, cx);
3548 Ok(())
3549 })
3550 }
3551
3552 async fn handle_update_worktree(
3553 this: ModelHandle<Self>,
3554 envelope: TypedEnvelope<proto::UpdateWorktree>,
3555 _: Arc<Client>,
3556 mut cx: AsyncAppContext,
3557 ) -> Result<()> {
3558 this.update(&mut cx, |this, cx| {
3559 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3560 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3561 worktree.update(cx, |worktree, _| {
3562 let worktree = worktree.as_remote_mut().unwrap();
3563 worktree.update_from_remote(envelope)
3564 })?;
3565 }
3566 Ok(())
3567 })
3568 }
3569
3570 async fn handle_update_diagnostic_summary(
3571 this: ModelHandle<Self>,
3572 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3573 _: Arc<Client>,
3574 mut cx: AsyncAppContext,
3575 ) -> Result<()> {
3576 this.update(&mut cx, |this, cx| {
3577 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3578 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3579 if let Some(summary) = envelope.payload.summary {
3580 let project_path = ProjectPath {
3581 worktree_id,
3582 path: Path::new(&summary.path).into(),
3583 };
3584 worktree.update(cx, |worktree, _| {
3585 worktree
3586 .as_remote_mut()
3587 .unwrap()
3588 .update_diagnostic_summary(project_path.path.clone(), &summary);
3589 });
3590 cx.emit(Event::DiagnosticsUpdated(project_path));
3591 }
3592 }
3593 Ok(())
3594 })
3595 }
3596
3597 async fn handle_start_language_server(
3598 this: ModelHandle<Self>,
3599 envelope: TypedEnvelope<proto::StartLanguageServer>,
3600 _: Arc<Client>,
3601 mut cx: AsyncAppContext,
3602 ) -> Result<()> {
3603 let server = envelope
3604 .payload
3605 .server
3606 .ok_or_else(|| anyhow!("invalid server"))?;
3607 this.update(&mut cx, |this, cx| {
3608 this.language_server_statuses.insert(
3609 server.id as usize,
3610 LanguageServerStatus {
3611 name: server.name,
3612 pending_work: Default::default(),
3613 pending_diagnostic_updates: 0,
3614 },
3615 );
3616 cx.notify();
3617 });
3618 Ok(())
3619 }
3620
3621 async fn handle_update_language_server(
3622 this: ModelHandle<Self>,
3623 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3624 _: Arc<Client>,
3625 mut cx: AsyncAppContext,
3626 ) -> Result<()> {
3627 let language_server_id = envelope.payload.language_server_id as usize;
3628 match envelope
3629 .payload
3630 .variant
3631 .ok_or_else(|| anyhow!("invalid variant"))?
3632 {
3633 proto::update_language_server::Variant::WorkStart(payload) => {
3634 this.update(&mut cx, |this, cx| {
3635 this.on_lsp_work_start(language_server_id, payload.token, cx);
3636 })
3637 }
3638 proto::update_language_server::Variant::WorkProgress(payload) => {
3639 this.update(&mut cx, |this, cx| {
3640 this.on_lsp_work_progress(
3641 language_server_id,
3642 payload.token,
3643 LanguageServerProgress {
3644 message: payload.message,
3645 percentage: payload.percentage.map(|p| p as usize),
3646 last_update_at: Instant::now(),
3647 },
3648 cx,
3649 );
3650 })
3651 }
3652 proto::update_language_server::Variant::WorkEnd(payload) => {
3653 this.update(&mut cx, |this, cx| {
3654 this.on_lsp_work_end(language_server_id, payload.token, cx);
3655 })
3656 }
3657 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3658 this.update(&mut cx, |this, cx| {
3659 this.disk_based_diagnostics_started(cx);
3660 })
3661 }
3662 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3663 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3664 }
3665 }
3666
3667 Ok(())
3668 }
3669
3670 async fn handle_update_buffer(
3671 this: ModelHandle<Self>,
3672 envelope: TypedEnvelope<proto::UpdateBuffer>,
3673 _: Arc<Client>,
3674 mut cx: AsyncAppContext,
3675 ) -> Result<()> {
3676 this.update(&mut cx, |this, cx| {
3677 let payload = envelope.payload.clone();
3678 let buffer_id = payload.buffer_id;
3679 let ops = payload
3680 .operations
3681 .into_iter()
3682 .map(|op| language::proto::deserialize_operation(op))
3683 .collect::<Result<Vec<_>, _>>()?;
3684 match this.opened_buffers.entry(buffer_id) {
3685 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3686 OpenBuffer::Strong(buffer) => {
3687 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3688 }
3689 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3690 OpenBuffer::Weak(_) => {}
3691 },
3692 hash_map::Entry::Vacant(e) => {
3693 e.insert(OpenBuffer::Loading(ops));
3694 }
3695 }
3696 Ok(())
3697 })
3698 }
3699
3700 async fn handle_update_buffer_file(
3701 this: ModelHandle<Self>,
3702 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3703 _: Arc<Client>,
3704 mut cx: AsyncAppContext,
3705 ) -> Result<()> {
3706 this.update(&mut cx, |this, cx| {
3707 let payload = envelope.payload.clone();
3708 let buffer_id = payload.buffer_id;
3709 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3710 let worktree = this
3711 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3712 .ok_or_else(|| anyhow!("no such worktree"))?;
3713 let file = File::from_proto(file, worktree.clone(), cx)?;
3714 let buffer = this
3715 .opened_buffers
3716 .get_mut(&buffer_id)
3717 .and_then(|b| b.upgrade(cx))
3718 .ok_or_else(|| anyhow!("no such buffer"))?;
3719 buffer.update(cx, |buffer, cx| {
3720 buffer.file_updated(Box::new(file), cx).detach();
3721 });
3722 Ok(())
3723 })
3724 }
3725
3726 async fn handle_save_buffer(
3727 this: ModelHandle<Self>,
3728 envelope: TypedEnvelope<proto::SaveBuffer>,
3729 _: Arc<Client>,
3730 mut cx: AsyncAppContext,
3731 ) -> Result<proto::BufferSaved> {
3732 let buffer_id = envelope.payload.buffer_id;
3733 let requested_version = deserialize_version(envelope.payload.version);
3734
3735 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3736 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3737 let buffer = this
3738 .opened_buffers
3739 .get(&buffer_id)
3740 .map(|buffer| buffer.upgrade(cx).unwrap())
3741 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3742 Ok::<_, anyhow::Error>((project_id, buffer))
3743 })?;
3744 buffer
3745 .update(&mut cx, |buffer, _| {
3746 buffer.wait_for_version(requested_version)
3747 })
3748 .await;
3749
3750 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3751 Ok(proto::BufferSaved {
3752 project_id,
3753 buffer_id,
3754 version: serialize_version(&saved_version),
3755 mtime: Some(mtime.into()),
3756 })
3757 }
3758
3759 async fn handle_format_buffers(
3760 this: ModelHandle<Self>,
3761 envelope: TypedEnvelope<proto::FormatBuffers>,
3762 _: Arc<Client>,
3763 mut cx: AsyncAppContext,
3764 ) -> Result<proto::FormatBuffersResponse> {
3765 let sender_id = envelope.original_sender_id()?;
3766 let format = this.update(&mut cx, |this, cx| {
3767 let mut buffers = HashSet::default();
3768 for buffer_id in &envelope.payload.buffer_ids {
3769 buffers.insert(
3770 this.opened_buffers
3771 .get(buffer_id)
3772 .map(|buffer| buffer.upgrade(cx).unwrap())
3773 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3774 );
3775 }
3776 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3777 })?;
3778
3779 let project_transaction = format.await?;
3780 let project_transaction = this.update(&mut cx, |this, cx| {
3781 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3782 });
3783 Ok(proto::FormatBuffersResponse {
3784 transaction: Some(project_transaction),
3785 })
3786 }
3787
3788 async fn handle_get_completions(
3789 this: ModelHandle<Self>,
3790 envelope: TypedEnvelope<proto::GetCompletions>,
3791 _: Arc<Client>,
3792 mut cx: AsyncAppContext,
3793 ) -> Result<proto::GetCompletionsResponse> {
3794 let position = envelope
3795 .payload
3796 .position
3797 .and_then(language::proto::deserialize_anchor)
3798 .ok_or_else(|| anyhow!("invalid position"))?;
3799 let version = deserialize_version(envelope.payload.version);
3800 let buffer = this.read_with(&cx, |this, cx| {
3801 this.opened_buffers
3802 .get(&envelope.payload.buffer_id)
3803 .map(|buffer| buffer.upgrade(cx).unwrap())
3804 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3805 })?;
3806 buffer
3807 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3808 .await;
3809 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3810 let completions = this
3811 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3812 .await?;
3813
3814 Ok(proto::GetCompletionsResponse {
3815 completions: completions
3816 .iter()
3817 .map(language::proto::serialize_completion)
3818 .collect(),
3819 version: serialize_version(&version),
3820 })
3821 }
3822
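    /// Handles a request to apply a completion's additional text edits (such as
    /// auto-import edits), replying with the resulting transaction, if any.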
3823 async fn handle_apply_additional_edits_for_completion(
3824 this: ModelHandle<Self>,
3825 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3826 _: Arc<Client>,
3827 mut cx: AsyncAppContext,
3828 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3829 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3830 let buffer = this
3831 .opened_buffers
3832 .get(&envelope.payload.buffer_id)
3833 .map(|buffer| buffer.upgrade(cx).unwrap())
3834 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3835 let language = buffer.read(cx).language();
3836 let completion = language::proto::deserialize_completion(
3837 envelope
3838 .payload
3839 .completion
3840 .ok_or_else(|| anyhow!("invalid completion"))?,
3841 language,
3842 )?;
3843 Ok::<_, anyhow::Error>(
3844 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3845 )
3846 })?;
3847
3848 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3849 transaction: apply_additional_edits
3850 .await?
3851 .as_ref()
3852 .map(language::proto::serialize_transaction),
3853 })
3854 }
3855
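    /// Handles a `GetCodeActions` request: waits for the buffer to reach the
    /// requested version, then replies with the code actions available in the
    /// given anchor range.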
3856 async fn handle_get_code_actions(
3857 this: ModelHandle<Self>,
3858 envelope: TypedEnvelope<proto::GetCodeActions>,
3859 _: Arc<Client>,
3860 mut cx: AsyncAppContext,
3861 ) -> Result<proto::GetCodeActionsResponse> {
3862 let start = envelope
3863 .payload
3864 .start
3865 .and_then(language::proto::deserialize_anchor)
3866 .ok_or_else(|| anyhow!("invalid start"))?;
3867 let end = envelope
3868 .payload
3869 .end
3870 .and_then(language::proto::deserialize_anchor)
3871 .ok_or_else(|| anyhow!("invalid end"))?;
3872 let buffer = this.update(&mut cx, |this, cx| {
3873 this.opened_buffers
3874 .get(&envelope.payload.buffer_id)
3875 .map(|buffer| buffer.upgrade(cx).unwrap())
3876 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3877 })?;
3878 buffer
3879 .update(&mut cx, |buffer, _| {
3880 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3881 })
3882 .await;
3883
3884 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3885 let code_actions = this.update(&mut cx, |this, cx| {
3886 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3887 })?;
3888
3889 Ok(proto::GetCodeActionsResponse {
3890 actions: code_actions
3891 .await?
3892 .iter()
3893 .map(language::proto::serialize_code_action)
3894 .collect(),
3895 version: serialize_version(&version),
3896 })
3897 }
3898
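    /// Handles an `ApplyCodeAction` request: applies the deserialized action and
    /// replies with the resulting project transaction, serialized for the
    /// requesting peer.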
3899 async fn handle_apply_code_action(
3900 this: ModelHandle<Self>,
3901 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3902 _: Arc<Client>,
3903 mut cx: AsyncAppContext,
3904 ) -> Result<proto::ApplyCodeActionResponse> {
3905 let sender_id = envelope.original_sender_id()?;
3906 let action = language::proto::deserialize_code_action(
3907 envelope
3908 .payload
3909 .action
3910 .ok_or_else(|| anyhow!("invalid action"))?,
3911 )?;
3912 let apply_code_action = this.update(&mut cx, |this, cx| {
3913 let buffer = this
3914 .opened_buffers
3915 .get(&envelope.payload.buffer_id)
3916 .map(|buffer| buffer.upgrade(cx).unwrap())
3917 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3918 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3919 })?;
3920
3921 let project_transaction = apply_code_action.await?;
3922 let project_transaction = this.update(&mut cx, |this, cx| {
3923 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3924 });
3925 Ok(proto::ApplyCodeActionResponse {
3926 transaction: Some(project_transaction),
3927 })
3928 }
3929
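    /// Generic handler for buffer-scoped LSP requests that implement `LspCommand`:
    /// deserializes the request, forwards it to the buffer's language server, and
    /// serializes the response for the requesting peer.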
3930 async fn handle_lsp_command<T: LspCommand>(
3931 this: ModelHandle<Self>,
3932 envelope: TypedEnvelope<T::ProtoRequest>,
3933 _: Arc<Client>,
3934 mut cx: AsyncAppContext,
3935 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3936 where
3937 <T::LspRequest as lsp::request::Request>::Result: Send,
3938 {
3939 let sender_id = envelope.original_sender_id()?;
3940 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3941 let buffer_handle = this.read_with(&cx, |this, _| {
3942 this.opened_buffers
3943 .get(&buffer_id)
3944 .and_then(|buffer| buffer.upgrade(&cx))
3945 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3946 })?;
3947 let request = T::from_proto(
3948 envelope.payload,
3949 this.clone(),
3950 buffer_handle.clone(),
3951 cx.clone(),
3952 )
3953 .await?;
3954 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3955 let response = this
3956 .update(&mut cx, |this, cx| {
3957 this.request_lsp(buffer_handle, request, cx)
3958 })
3959 .await?;
3960 this.update(&mut cx, |this, cx| {
3961 Ok(T::response_to_proto(
3962 response,
3963 this,
3964 sender_id,
3965 &buffer_version,
3966 cx,
3967 ))
3968 })
3969 }
3970
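    /// Handles a `GetProjectSymbols` request by running a project-wide symbol
    /// query and replying with the serialized symbols.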
3971 async fn handle_get_project_symbols(
3972 this: ModelHandle<Self>,
3973 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3974 _: Arc<Client>,
3975 mut cx: AsyncAppContext,
3976 ) -> Result<proto::GetProjectSymbolsResponse> {
3977 let symbols = this
3978 .update(&mut cx, |this, cx| {
3979 this.symbols(&envelope.payload.query, cx)
3980 })
3981 .await?;
3982
3983 Ok(proto::GetProjectSymbolsResponse {
3984 symbols: symbols.iter().map(serialize_symbol).collect(),
3985 })
3986 }
3987
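    /// Handles a `SearchProject` request: runs the search and replies with the
    /// matching locations, serializing each buffer for the requesting peer.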
3988 async fn handle_search_project(
3989 this: ModelHandle<Self>,
3990 envelope: TypedEnvelope<proto::SearchProject>,
3991 _: Arc<Client>,
3992 mut cx: AsyncAppContext,
3993 ) -> Result<proto::SearchProjectResponse> {
3994 let peer_id = envelope.original_sender_id()?;
3995 let query = SearchQuery::from_proto(envelope.payload)?;
3996 let result = this
3997 .update(&mut cx, |this, cx| this.search(query, cx))
3998 .await?;
3999
4000 this.update(&mut cx, |this, cx| {
4001 let mut locations = Vec::new();
4002 for (buffer, ranges) in result {
4003 for range in ranges {
4004 let start = serialize_anchor(&range.start);
4005 let end = serialize_anchor(&range.end);
4006 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4007 locations.push(proto::Location {
4008 buffer: Some(buffer),
4009 start: Some(start),
4010 end: Some(end),
4011 });
4012 }
4013 }
4014 Ok(proto::SearchProjectResponse { locations })
4015 })
4016 }
4017
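    /// Handles a request to open the buffer containing a previously returned
    /// project symbol, verifying the symbol's signature before trusting its path.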
4018 async fn handle_open_buffer_for_symbol(
4019 this: ModelHandle<Self>,
4020 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4021 _: Arc<Client>,
4022 mut cx: AsyncAppContext,
4023 ) -> Result<proto::OpenBufferForSymbolResponse> {
4024 let peer_id = envelope.original_sender_id()?;
4025 let symbol = envelope
4026 .payload
4027 .symbol
4028 .ok_or_else(|| anyhow!("invalid symbol"))?;
4029 let symbol = this.read_with(&cx, |this, _| {
4030 let symbol = this.deserialize_symbol(symbol)?;
4031 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4032 if signature == symbol.signature {
4033 Ok(symbol)
4034 } else {
4035 Err(anyhow!("invalid symbol signature"))
4036 }
4037 })?;
4038 let buffer = this
4039 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4040 .await?;
4041
4042 Ok(proto::OpenBufferForSymbolResponse {
4043 buffer: Some(this.update(&mut cx, |this, cx| {
4044 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4045 })),
4046 })
4047 }
4048
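    /// Computes a SHA-256 digest over a symbol's worktree id, path, and this
    /// project's private nonce. Symbols handed back by peers are only trusted
    /// when their signature matches, so requests for arbitrary paths can be
    /// rejected.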
4049 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4050 let mut hasher = Sha256::new();
4051 hasher.update(worktree_id.to_proto().to_be_bytes());
4052 hasher.update(path.to_string_lossy().as_bytes());
4053 hasher.update(self.nonce.to_be_bytes());
4054 hasher.finalize().as_slice().try_into().unwrap()
4055 }
4056
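    /// Handles a request to open a buffer by its remote id, replying with the
    /// buffer serialized for the requesting peer.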
4057 async fn handle_open_buffer_by_id(
4058 this: ModelHandle<Self>,
4059 envelope: TypedEnvelope<proto::OpenBufferById>,
4060 _: Arc<Client>,
4061 mut cx: AsyncAppContext,
4062 ) -> Result<proto::OpenBufferResponse> {
4063 let peer_id = envelope.original_sender_id()?;
4064 let buffer = this
4065 .update(&mut cx, |this, cx| {
4066 this.open_buffer_by_id(envelope.payload.id, cx)
4067 })
4068 .await?;
4069 this.update(&mut cx, |this, cx| {
4070 Ok(proto::OpenBufferResponse {
4071 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4072 })
4073 })
4074 }
4075
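    /// Handles a request to open a buffer by worktree-relative path, replying
    /// with the buffer serialized for the requesting peer.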
4076 async fn handle_open_buffer_by_path(
4077 this: ModelHandle<Self>,
4078 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4079 _: Arc<Client>,
4080 mut cx: AsyncAppContext,
4081 ) -> Result<proto::OpenBufferResponse> {
4082 let peer_id = envelope.original_sender_id()?;
4083 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4084 let open_buffer = this.update(&mut cx, |this, cx| {
4085 this.open_buffer(
4086 ProjectPath {
4087 worktree_id,
4088 path: PathBuf::from(envelope.payload.path).into(),
4089 },
4090 cx,
4091 )
4092 });
4093
4094 let buffer = open_buffer.await?;
4095 this.update(&mut cx, |this, cx| {
4096 Ok(proto::OpenBufferResponse {
4097 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4098 })
4099 })
4100 }
4101
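    /// Serializes a project transaction for a peer, including each affected
    /// buffer (sent as full state if the peer hasn't seen it yet) alongside its
    /// transaction.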
4102 fn serialize_project_transaction_for_peer(
4103 &mut self,
4104 project_transaction: ProjectTransaction,
4105 peer_id: PeerId,
4106 cx: &AppContext,
4107 ) -> proto::ProjectTransaction {
4108 let mut serialized_transaction = proto::ProjectTransaction {
4109 buffers: Default::default(),
4110 transactions: Default::default(),
4111 };
4112 for (buffer, transaction) in project_transaction.0 {
4113 serialized_transaction
4114 .buffers
4115 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4116 serialized_transaction
4117 .transactions
4118 .push(language::proto::serialize_transaction(&transaction));
4119 }
4120 serialized_transaction
4121 }
4122
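    /// Reconstructs a project transaction received from a peer, resolving each
    /// buffer, waiting for the referenced edits to arrive, and optionally pushing
    /// the transactions onto the buffers' undo histories.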
4123 fn deserialize_project_transaction(
4124 &mut self,
4125 message: proto::ProjectTransaction,
4126 push_to_history: bool,
4127 cx: &mut ModelContext<Self>,
4128 ) -> Task<Result<ProjectTransaction>> {
4129 cx.spawn(|this, mut cx| async move {
4130 let mut project_transaction = ProjectTransaction::default();
4131 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4132 let buffer = this
4133 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4134 .await?;
4135 let transaction = language::proto::deserialize_transaction(transaction)?;
4136 project_transaction.0.insert(buffer, transaction);
4137 }
4138
4139 for (buffer, transaction) in &project_transaction.0 {
4140 buffer
4141 .update(&mut cx, |buffer, _| {
4142 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4143 })
4144 .await;
4145
4146 if push_to_history {
4147 buffer.update(&mut cx, |buffer, _| {
4148 buffer.push_transaction(transaction.clone(), Instant::now());
4149 });
4150 }
4151 }
4152
4153 Ok(project_transaction)
4154 })
4155 }
4156
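    /// Serializes a buffer for a peer: the first time a buffer is shared with a
    /// given peer its full state is sent; afterwards only its id is sent.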
4157 fn serialize_buffer_for_peer(
4158 &mut self,
4159 buffer: &ModelHandle<Buffer>,
4160 peer_id: PeerId,
4161 cx: &AppContext,
4162 ) -> proto::Buffer {
4163 let buffer_id = buffer.read(cx).remote_id();
4164 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4165 if shared_buffers.insert(buffer_id) {
4166 proto::Buffer {
4167 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4168 }
4169 } else {
4170 proto::Buffer {
4171 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4172 }
4173 }
4174 }
4175
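    /// Resolves a `proto::Buffer` into a buffer handle: either waits for an
    /// already-shared buffer with the given id to be registered, or constructs a
    /// new buffer from the provided state and registers it.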
4176 fn deserialize_buffer(
4177 &mut self,
4178 buffer: proto::Buffer,
4179 cx: &mut ModelContext<Self>,
4180 ) -> Task<Result<ModelHandle<Buffer>>> {
4181 let replica_id = self.replica_id();
4182
4183 let opened_buffer_tx = self.opened_buffer.0.clone();
4184 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4185 cx.spawn(|this, mut cx| async move {
4186 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4187 proto::buffer::Variant::Id(id) => {
4188 let buffer = loop {
4189 let buffer = this.read_with(&cx, |this, cx| {
4190 this.opened_buffers
4191 .get(&id)
4192 .and_then(|buffer| buffer.upgrade(cx))
4193 });
4194 if let Some(buffer) = buffer {
4195 break buffer;
4196 }
4197 opened_buffer_rx
4198 .next()
4199 .await
4200 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4201 };
4202 Ok(buffer)
4203 }
4204 proto::buffer::Variant::State(mut buffer) => {
4205 let mut buffer_worktree = None;
4206 let mut buffer_file = None;
4207 if let Some(file) = buffer.file.take() {
4208 this.read_with(&cx, |this, cx| {
4209 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4210 let worktree =
4211 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4212 anyhow!("no worktree found for id {}", file.worktree_id)
4213 })?;
4214 buffer_file =
4215 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4216 as Box<dyn language::File>);
4217 buffer_worktree = Some(worktree);
4218 Ok::<_, anyhow::Error>(())
4219 })?;
4220 }
4221
4222 let buffer = cx.add_model(|cx| {
4223 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4224 });
4225
4226 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4227
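                    // Wake any tasks waiting in the `Variant::Id` case above for
                    // this buffer to become available.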
4228 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4229 Ok(buffer)
4230 }
4231 }
4232 })
4233 }
4234
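    /// Reconstructs a `Symbol` from its protobuf representation, recomputing its
    /// label using the language registered for the symbol's path.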
4235 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4236 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4237 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4238 let start = serialized_symbol
4239 .start
4240 .ok_or_else(|| anyhow!("invalid start"))?;
4241 let end = serialized_symbol
4242 .end
4243 .ok_or_else(|| anyhow!("invalid end"))?;
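        // Assumption: the wire value was produced by `serialize_symbol`, so it is
        // taken to be a valid value of the symbol-kind type being transmuted to.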
4244 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4245 let path = PathBuf::from(serialized_symbol.path);
4246 let language = self.languages.select_language(&path);
4247 Ok(Symbol {
4248 source_worktree_id,
4249 worktree_id,
4250 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4251 label: language
4252 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4253 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4254 name: serialized_symbol.name,
4255 path,
4256 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4257 kind,
4258 signature: serialized_symbol
4259 .signature
4260 .try_into()
4261 .map_err(|_| anyhow!("invalid signature"))?,
4262 })
4263 }
4264
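    /// Handles a notification that a peer saved a buffer, updating the local
    /// buffer's saved version and mtime.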
4265 async fn handle_buffer_saved(
4266 this: ModelHandle<Self>,
4267 envelope: TypedEnvelope<proto::BufferSaved>,
4268 _: Arc<Client>,
4269 mut cx: AsyncAppContext,
4270 ) -> Result<()> {
4271 let version = deserialize_version(envelope.payload.version);
4272 let mtime = envelope
4273 .payload
4274 .mtime
4275 .ok_or_else(|| anyhow!("missing mtime"))?
4276 .into();
4277
4278 this.update(&mut cx, |this, cx| {
4279 let buffer = this
4280 .opened_buffers
4281 .get(&envelope.payload.buffer_id)
4282 .and_then(|buffer| buffer.upgrade(cx));
4283 if let Some(buffer) = buffer {
4284 buffer.update(cx, |buffer, cx| {
4285 buffer.did_save(version, mtime, None, cx);
4286 });
4287 }
4288 Ok(())
4289 })
4290 }
4291
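    /// Handles a notification that a peer reloaded a buffer from disk, updating
    /// the local buffer's version and mtime accordingly.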
4292 async fn handle_buffer_reloaded(
4293 this: ModelHandle<Self>,
4294 envelope: TypedEnvelope<proto::BufferReloaded>,
4295 _: Arc<Client>,
4296 mut cx: AsyncAppContext,
4297 ) -> Result<()> {
4298 let payload = envelope.payload.clone();
4299 let version = deserialize_version(payload.version);
4300 let mtime = payload
4301 .mtime
4302 .ok_or_else(|| anyhow!("missing mtime"))?
4303 .into();
4304 this.update(&mut cx, |this, cx| {
4305 let buffer = this
4306 .opened_buffers
4307 .get(&payload.buffer_id)
4308 .and_then(|buffer| buffer.upgrade(cx));
4309 if let Some(buffer) = buffer {
4310 buffer.update(cx, |buffer, cx| {
4311 buffer.did_reload(version, mtime, cx);
4312 });
4313 }
4314 Ok(())
4315 })
4316 }
4317
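    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches. The matching itself runs on the
    /// background executor.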
4318 pub fn match_paths<'a>(
4319 &self,
4320 query: &'a str,
4321 include_ignored: bool,
4322 smart_case: bool,
4323 max_results: usize,
4324 cancel_flag: &'a AtomicBool,
4325 cx: &AppContext,
4326 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4327 let worktrees = self
4328 .worktrees(cx)
4329 .filter(|worktree| worktree.read(cx).is_visible())
4330 .collect::<Vec<_>>();
4331 let include_root_name = worktrees.len() > 1;
4332 let candidate_sets = worktrees
4333 .into_iter()
4334 .map(|worktree| CandidateSet {
4335 snapshot: worktree.read(cx).snapshot(),
4336 include_ignored,
4337 include_root_name,
4338 })
4339 .collect::<Vec<_>>();
4340
4341 let background = cx.background().clone();
4342 async move {
4343 fuzzy::match_paths(
4344 candidate_sets.as_slice(),
4345 query,
4346 smart_case,
4347 max_results,
4348 cancel_flag,
4349 background,
4350 )
4351 .await
4352 }
4353 }
4354
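    /// Converts LSP text edits (expressed against an optionally older document
    /// version) into anchored edits on the current buffer. Adjacent edits are
    /// merged, and multi-line replacements are diffed against the old text so
    /// that anchors in unchanged regions are preserved.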
4355 fn edits_from_lsp(
4356 &mut self,
4357 buffer: &ModelHandle<Buffer>,
4358 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4359 version: Option<i32>,
4360 cx: &mut ModelContext<Self>,
4361 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4362 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4363 cx.background().spawn(async move {
4364 let snapshot = snapshot?;
4365 let mut lsp_edits = lsp_edits
4366 .into_iter()
4367 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4368 .peekable();
4369
4370 let mut edits = Vec::new();
4371 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4372 // Combine any LSP edits that are adjacent.
4373 //
4374 // Also, combine LSP edits that are separated from each other by only
4375 // a newline. This is important because for some code actions,
4376 // rust-analyzer rewrites the entire buffer via a series of edits that
4377 // are separated by unchanged newline characters.
4378 //
4379 // In order for the diffing logic below to work properly, any edits that
4380 // cancel each other out must be combined into one.
4381 while let Some((next_range, next_text)) = lsp_edits.peek() {
4382 if next_range.start > range.end {
4383 if next_range.start.row > range.end.row + 1
4384 || next_range.start.column > 0
4385 || snapshot.clip_point_utf16(
4386 PointUtf16::new(range.end.row, u32::MAX),
4387 Bias::Left,
4388 ) > range.end
4389 {
4390 break;
4391 }
4392 new_text.push('\n');
4393 }
4394 range.end = next_range.end;
4395 new_text.push_str(&next_text);
4396 lsp_edits.next();
4397 }
4398
4399 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4400 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4401 {
4402 return Err(anyhow!("invalid edits received from language server"));
4403 }
4404
4405 // For multiline edits, perform a diff of the old and new text so that
4406 // we can identify the changes more precisely, preserving the locations
4407 // of any anchors positioned in the unchanged regions.
4408 if range.end.row > range.start.row {
4409 let mut offset = range.start.to_offset(&snapshot);
4410 let old_text = snapshot.text_for_range(range).collect::<String>();
4411
4412 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4413 let mut moved_since_edit = true;
4414 for change in diff.iter_all_changes() {
4415 let tag = change.tag();
4416 let value = change.value();
4417 match tag {
4418 ChangeTag::Equal => {
4419 offset += value.len();
4420 moved_since_edit = true;
4421 }
4422 ChangeTag::Delete => {
4423 let start = snapshot.anchor_after(offset);
4424 let end = snapshot.anchor_before(offset + value.len());
4425 if moved_since_edit {
4426 edits.push((start..end, String::new()));
4427 } else {
4428 edits.last_mut().unwrap().0.end = end;
4429 }
4430 offset += value.len();
4431 moved_since_edit = false;
4432 }
4433 ChangeTag::Insert => {
4434 if moved_since_edit {
4435 let anchor = snapshot.anchor_after(offset);
4436 edits.push((anchor.clone()..anchor, value.to_string()));
4437 } else {
4438 edits.last_mut().unwrap().1.push_str(value);
4439 }
4440 moved_since_edit = false;
4441 }
4442 }
4443 }
4444 } else if range.end == range.start {
4445 let anchor = snapshot.anchor_after(range.start);
4446 edits.push((anchor.clone()..anchor, new_text));
4447 } else {
4448 let edit_start = snapshot.anchor_after(range.start);
4449 let edit_end = snapshot.anchor_before(range.end);
4450 edits.push((edit_start..edit_end, new_text));
4451 }
4452 }
4453
4454 Ok(edits)
4455 })
4456 }
4457
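    /// Returns the buffer snapshot corresponding to the given LSP document
    /// version, or the current text snapshot if no version is specified, pruning
    /// snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old along the way.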
4458 fn buffer_snapshot_for_lsp_version(
4459 &mut self,
4460 buffer: &ModelHandle<Buffer>,
4461 version: Option<i32>,
4462 cx: &AppContext,
4463 ) -> Result<TextBufferSnapshot> {
4464 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4465
4466 if let Some(version) = version {
4467 let buffer_id = buffer.read(cx).remote_id();
4468 let snapshots = self
4469 .buffer_snapshots
4470 .get_mut(&buffer_id)
4471 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4472 let mut found_snapshot = None;
4473 snapshots.retain(|(snapshot_version, snapshot)| {
4474 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4475 false
4476 } else {
4477 if *snapshot_version == version {
4478 found_snapshot = Some(snapshot.clone());
4479 }
4480 true
4481 }
4482 });
4483
4484 found_snapshot.ok_or_else(|| {
4485 anyhow!(
4486 "snapshot not found for buffer {} at version {}",
4487 buffer_id,
4488 version
4489 )
4490 })
4491 } else {
4492 Ok(buffer.read(cx).text_snapshot())
4493 }
4494 }
4495
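    /// Returns the language server (and its adapter) responsible for the given
    /// buffer, based on the buffer's worktree and language, if one is running.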
4496 fn language_server_for_buffer(
4497 &self,
4498 buffer: &Buffer,
4499 cx: &AppContext,
4500 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4501 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4502 let worktree_id = file.worktree_id(cx);
4503 self.language_servers
4504 .get(&(worktree_id, language.lsp_adapter()?.name()))
4505 } else {
4506 None
4507 }
4508 }
4509}
4510
4511impl WorktreeHandle {
4512 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4513 match self {
4514 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4515 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4516 }
4517 }
4518}
4519
4520impl OpenBuffer {
4521 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4522 match self {
4523 OpenBuffer::Strong(handle) => Some(handle.clone()),
4524 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4525 OpenBuffer::Loading(_) => None,
4526 }
4527 }
4528}
4529
4530struct CandidateSet {
4531 snapshot: Snapshot,
4532 include_ignored: bool,
4533 include_root_name: bool,
4534}
4535
4536impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4537 type Candidates = CandidateSetIter<'a>;
4538
4539 fn id(&self) -> usize {
4540 self.snapshot.id().to_usize()
4541 }
4542
4543 fn len(&self) -> usize {
4544 if self.include_ignored {
4545 self.snapshot.file_count()
4546 } else {
4547 self.snapshot.visible_file_count()
4548 }
4549 }
4550
4551 fn prefix(&self) -> Arc<str> {
4552 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4553 self.snapshot.root_name().into()
4554 } else if self.include_root_name {
4555 format!("{}/", self.snapshot.root_name()).into()
4556 } else {
4557 "".into()
4558 }
4559 }
4560
4561 fn candidates(&'a self, start: usize) -> Self::Candidates {
4562 CandidateSetIter {
4563 traversal: self.snapshot.files(self.include_ignored, start),
4564 }
4565 }
4566}
4567
4568struct CandidateSetIter<'a> {
4569 traversal: Traversal<'a>,
4570}
4571
4572impl<'a> Iterator for CandidateSetIter<'a> {
4573 type Item = PathMatchCandidate<'a>;
4574
4575 fn next(&mut self) -> Option<Self::Item> {
4576 self.traversal.next().map(|entry| {
4577 if let EntryKind::File(char_bag) = entry.kind {
4578 PathMatchCandidate {
4579 path: &entry.path,
4580 char_bag,
4581 }
4582 } else {
4583 unreachable!()
4584 }
4585 })
4586 }
4587}
4588
4589impl Entity for Project {
4590 type Event = Event;
4591
4592 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4593 match &self.client_state {
4594 ProjectClientState::Local { remote_id_rx, .. } => {
4595 if let Some(project_id) = *remote_id_rx.borrow() {
4596 self.client
4597 .send(proto::UnregisterProject { project_id })
4598 .log_err();
4599 }
4600 }
4601 ProjectClientState::Remote { remote_id, .. } => {
4602 self.client
4603 .send(proto::LeaveProject {
4604 project_id: *remote_id,
4605 })
4606 .log_err();
4607 }
4608 }
4609 }
4610
4611 fn app_will_quit(
4612 &mut self,
4613 _: &mut MutableAppContext,
4614 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4615 let shutdown_futures = self
4616 .language_servers
4617 .drain()
4618 .filter_map(|(_, (_, server))| server.shutdown())
4619 .collect::<Vec<_>>();
4620 Some(
4621 async move {
4622 futures::future::join_all(shutdown_futures).await;
4623 }
4624 .boxed(),
4625 )
4626 }
4627}
4628
4629impl Collaborator {
4630 fn from_proto(
4631 message: proto::Collaborator,
4632 user_store: &ModelHandle<UserStore>,
4633 cx: &mut AsyncAppContext,
4634 ) -> impl Future<Output = Result<Self>> {
4635 let user = user_store.update(cx, |user_store, cx| {
4636 user_store.fetch_user(message.user_id, cx)
4637 });
4638
4639 async move {
4640 Ok(Self {
4641 peer_id: PeerId(message.peer_id),
4642 user: user.await?,
4643 replica_id: message.replica_id as ReplicaId,
4644 })
4645 }
4646 }
4647}
4648
4649impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4650 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4651 Self {
4652 worktree_id,
4653 path: path.as_ref().into(),
4654 }
4655 }
4656}
4657
4658impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4659 fn from(options: lsp::CreateFileOptions) -> Self {
4660 Self {
4661 overwrite: options.overwrite.unwrap_or(false),
4662 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4663 }
4664 }
4665}
4666
4667impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4668 fn from(options: lsp::RenameFileOptions) -> Self {
4669 Self {
4670 overwrite: options.overwrite.unwrap_or(false),
4671 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4672 }
4673 }
4674}
4675
4676impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4677 fn from(options: lsp::DeleteFileOptions) -> Self {
4678 Self {
4679 recursive: options.recursive.unwrap_or(false),
4680 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4681 }
4682 }
4683}
4684
4685fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4686 proto::Symbol {
4687 source_worktree_id: symbol.source_worktree_id.to_proto(),
4688 worktree_id: symbol.worktree_id.to_proto(),
4689 language_server_name: symbol.language_server_name.0.to_string(),
4690 name: symbol.name.clone(),
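        // Mirrors the transmute in `deserialize_symbol`: the symbol kind is
        // assumed to share its in-memory representation with the wire type.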
4691 kind: unsafe { mem::transmute(symbol.kind) },
4692 path: symbol.path.to_string_lossy().to_string(),
4693 start: Some(proto::Point {
4694 row: symbol.range.start.row,
4695 column: symbol.range.start.column,
4696 }),
4697 end: Some(proto::Point {
4698 row: symbol.range.end.row,
4699 column: symbol.range.end.column,
4700 }),
4701 signature: symbol.signature.to_vec(),
4702 }
4703}
4704
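/// Computes `path` relative to `base`, walking up with `..` components where the
/// two diverge. For example, relativizing `/a/c/d` against `/a/b` is expected to
/// yield `../c/d`.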
4705fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4706 let mut path_components = path.components();
4707 let mut base_components = base.components();
4708 let mut components: Vec<Component> = Vec::new();
4709 loop {
4710 match (path_components.next(), base_components.next()) {
4711 (None, None) => break,
4712 (Some(a), None) => {
4713 components.push(a);
4714 components.extend(path_components.by_ref());
4715 break;
4716 }
4717 (None, _) => components.push(Component::ParentDir),
4718 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4719 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4720 (Some(a), Some(_)) => {
4721 components.push(Component::ParentDir);
4722 for _ in base_components {
4723 components.push(Component::ParentDir);
4724 }
4725 components.push(a);
4726 components.extend(path_components.by_ref());
4727 break;
4728 }
4729 }
4730 }
4731 components.iter().map(|c| c.as_os_str()).collect()
4732}
4733
4734impl Item for Buffer {
4735 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4736 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4737 }
4738}
4739
4740#[cfg(test)]
4741mod tests {
4742 use super::{Event, *};
4743 use fs::RealFs;
4744 use futures::{future, StreamExt};
4745 use gpui::test::subscribe;
4746 use language::{
4747 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4748 ToPoint,
4749 };
4750 use lsp::Url;
4751 use serde_json::json;
4752 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4753 use unindent::Unindent as _;
4754 use util::{assert_set_eq, test::temp_tree};
4755 use worktree::WorktreeHandle as _;
4756
4757 #[gpui::test]
4758 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4759 let dir = temp_tree(json!({
4760 "root": {
4761 "apple": "",
4762 "banana": {
4763 "carrot": {
4764 "date": "",
4765 "endive": "",
4766 }
4767 },
4768 "fennel": {
4769 "grape": "",
4770 }
4771 }
4772 }));
4773
4774 let root_link_path = dir.path().join("root_link");
4775 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4776 unix::fs::symlink(
4777 &dir.path().join("root/fennel"),
4778 &dir.path().join("root/finnochio"),
4779 )
4780 .unwrap();
4781
4782 let project = Project::test(Arc::new(RealFs), cx);
4783
4784 let (tree, _) = project
4785 .update(cx, |project, cx| {
4786 project.find_or_create_local_worktree(&root_link_path, true, cx)
4787 })
4788 .await
4789 .unwrap();
4790
4791 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4792 .await;
4793 cx.read(|cx| {
4794 let tree = tree.read(cx);
4795 assert_eq!(tree.file_count(), 5);
4796 assert_eq!(
4797 tree.inode_for_path("fennel/grape"),
4798 tree.inode_for_path("finnochio/grape")
4799 );
4800 });
4801
4802 let cancel_flag = Default::default();
4803 let results = project
4804 .read_with(cx, |project, cx| {
4805 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4806 })
4807 .await;
4808 assert_eq!(
4809 results
4810 .into_iter()
4811 .map(|result| result.path)
4812 .collect::<Vec<Arc<Path>>>(),
4813 vec![
4814 PathBuf::from("banana/carrot/date").into(),
4815 PathBuf::from("banana/carrot/endive").into(),
4816 ]
4817 );
4818 }
4819
4820 #[gpui::test]
4821 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4822 cx.foreground().forbid_parking();
4823
4824 let mut rust_language = Language::new(
4825 LanguageConfig {
4826 name: "Rust".into(),
4827 path_suffixes: vec!["rs".to_string()],
4828 ..Default::default()
4829 },
4830 Some(tree_sitter_rust::language()),
4831 );
4832 let mut json_language = Language::new(
4833 LanguageConfig {
4834 name: "JSON".into(),
4835 path_suffixes: vec!["json".to_string()],
4836 ..Default::default()
4837 },
4838 None,
4839 );
4840 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4841 name: "the-rust-language-server",
4842 capabilities: lsp::ServerCapabilities {
4843 completion_provider: Some(lsp::CompletionOptions {
4844 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4845 ..Default::default()
4846 }),
4847 ..Default::default()
4848 },
4849 ..Default::default()
4850 });
4851 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4852 name: "the-json-language-server",
4853 capabilities: lsp::ServerCapabilities {
4854 completion_provider: Some(lsp::CompletionOptions {
4855 trigger_characters: Some(vec![":".to_string()]),
4856 ..Default::default()
4857 }),
4858 ..Default::default()
4859 },
4860 ..Default::default()
4861 });
4862
4863 let fs = FakeFs::new(cx.background());
4864 fs.insert_tree(
4865 "/the-root",
4866 json!({
4867 "test.rs": "const A: i32 = 1;",
4868 "test2.rs": "",
4869 "Cargo.toml": "a = 1",
4870 "package.json": "{\"a\": 1}",
4871 }),
4872 )
4873 .await;
4874
4875 let project = Project::test(fs, cx);
4876 project.update(cx, |project, _| {
4877 project.languages.add(Arc::new(rust_language));
4878 project.languages.add(Arc::new(json_language));
4879 });
4880
4881 let worktree_id = project
4882 .update(cx, |project, cx| {
4883 project.find_or_create_local_worktree("/the-root", true, cx)
4884 })
4885 .await
4886 .unwrap()
4887 .0
4888 .read_with(cx, |tree, _| tree.id());
4889
4890 // Open a buffer without an associated language server.
4891 let toml_buffer = project
4892 .update(cx, |project, cx| {
4893 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4894 })
4895 .await
4896 .unwrap();
4897
4898 // Open a buffer with an associated language server.
4899 let rust_buffer = project
4900 .update(cx, |project, cx| {
4901 project.open_buffer((worktree_id, "test.rs"), cx)
4902 })
4903 .await
4904 .unwrap();
4905
4906 // A Rust language server is started, and it is notified about the newly opened Rust file.
4907 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4908 assert_eq!(
4909 fake_rust_server
4910 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4911 .await
4912 .text_document,
4913 lsp::TextDocumentItem {
4914 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4915 version: 0,
4916 text: "const A: i32 = 1;".to_string(),
4917 language_id: Default::default()
4918 }
4919 );
4920
4921 // The buffer is configured based on the language server's capabilities.
4922 rust_buffer.read_with(cx, |buffer, _| {
4923 assert_eq!(
4924 buffer.completion_triggers(),
4925 &[".".to_string(), "::".to_string()]
4926 );
4927 });
4928 toml_buffer.read_with(cx, |buffer, _| {
4929 assert!(buffer.completion_triggers().is_empty());
4930 });
4931
4932 // Edit a buffer. The changes are reported to the language server.
4933 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4934 assert_eq!(
4935 fake_rust_server
4936 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4937 .await
4938 .text_document,
4939 lsp::VersionedTextDocumentIdentifier::new(
4940 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4941 1
4942 )
4943 );
4944
4945 // Open a third buffer with a different associated language server.
4946 let json_buffer = project
4947 .update(cx, |project, cx| {
4948 project.open_buffer((worktree_id, "package.json"), cx)
4949 })
4950 .await
4951 .unwrap();
4952
4953 // A JSON language server is started, and it is notified only about the JSON buffer.
4954 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4955 assert_eq!(
4956 fake_json_server
4957 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4958 .await
4959 .text_document,
4960 lsp::TextDocumentItem {
4961 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4962 version: 0,
4963 text: "{\"a\": 1}".to_string(),
4964 language_id: Default::default()
4965 }
4966 );
4967
4968 // This buffer is configured based on the second language server's
4969 // capabilities.
4970 json_buffer.read_with(cx, |buffer, _| {
4971 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4972 });
4973
4974 // When opening another buffer whose language server is already running,
4975 // it is also configured based on the existing language server's capabilities.
4976 let rust_buffer2 = project
4977 .update(cx, |project, cx| {
4978 project.open_buffer((worktree_id, "test2.rs"), cx)
4979 })
4980 .await
4981 .unwrap();
4982 rust_buffer2.read_with(cx, |buffer, _| {
4983 assert_eq!(
4984 buffer.completion_triggers(),
4985 &[".".to_string(), "::".to_string()]
4986 );
4987 });
4988
4989 // Changes are reported only to servers matching the buffer's language.
4990 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4991 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4992 assert_eq!(
4993 fake_rust_server
4994 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4995 .await
4996 .text_document,
4997 lsp::VersionedTextDocumentIdentifier::new(
4998 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4999 1
5000 )
5001 );
5002
5003 // Save notifications are reported to all servers.
5004 toml_buffer
5005 .update(cx, |buffer, cx| buffer.save(cx))
5006 .await
5007 .unwrap();
5008 assert_eq!(
5009 fake_rust_server
5010 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5011 .await
5012 .text_document,
5013 lsp::TextDocumentIdentifier::new(
5014 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5015 )
5016 );
5017 assert_eq!(
5018 fake_json_server
5019 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5020 .await
5021 .text_document,
5022 lsp::TextDocumentIdentifier::new(
5023 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5024 )
5025 );
5026
5027 // Restart language servers
5028 project.update(cx, |project, cx| {
5029 project.restart_language_servers_for_buffers(
5030 vec![rust_buffer.clone(), json_buffer.clone()],
5031 cx,
5032 );
5033 });
5034
5035 let mut rust_shutdown_requests = fake_rust_server
5036 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5037 let mut json_shutdown_requests = fake_json_server
5038 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5039 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5040
5041 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5042 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5043
5044 // Ensure both Rust documents are reopened in the new Rust language server, regardless of order.
5045 assert_set_eq!(
5046 [
5047 fake_rust_server
5048 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5049 .await
5050 .text_document,
5051 fake_rust_server
5052 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5053 .await
5054 .text_document,
5055 ],
5056 [
5057 lsp::TextDocumentItem {
5058 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5059 version: 1,
5060 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5061 language_id: Default::default()
5062 },
5063 lsp::TextDocumentItem {
5064 uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5065 version: 1,
5066 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5067 language_id: Default::default()
5068 },
5069 ]
5070 );
5071
5072 // Ensure the JSON document is reopened in the new JSON language server.
5073 assert_eq!(
5074 fake_json_server
5075 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5076 .await
5077 .text_document,
5078 lsp::TextDocumentItem {
5079 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5080 version: 0,
5081 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5082 language_id: Default::default()
5083 }
5084 );
5085
5086 // Close notifications are reported only to servers matching the buffer's language.
5087 cx.update(|_| drop(json_buffer));
5088 let close_message = lsp::DidCloseTextDocumentParams {
5089 text_document: lsp::TextDocumentIdentifier::new(
5090 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5091 ),
5092 };
5093 assert_eq!(
5094 fake_json_server
5095 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5096 .await,
5097 close_message,
5098 );
5099 }
5100
5101 #[gpui::test]
5102 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5103 cx.foreground().forbid_parking();
5104
5105 let progress_token = "the-progress-token";
5106 let mut language = Language::new(
5107 LanguageConfig {
5108 name: "Rust".into(),
5109 path_suffixes: vec!["rs".to_string()],
5110 ..Default::default()
5111 },
5112 Some(tree_sitter_rust::language()),
5113 );
5114 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5115 disk_based_diagnostics_progress_token: Some(progress_token),
5116 disk_based_diagnostics_sources: &["disk"],
5117 ..Default::default()
5118 });
5119
5120 let fs = FakeFs::new(cx.background());
5121 fs.insert_tree(
5122 "/dir",
5123 json!({
5124 "a.rs": "fn a() { A }",
5125 "b.rs": "const y: i32 = 1",
5126 }),
5127 )
5128 .await;
5129
5130 let project = Project::test(fs, cx);
5131 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5132
5133 let (tree, _) = project
5134 .update(cx, |project, cx| {
5135 project.find_or_create_local_worktree("/dir", true, cx)
5136 })
5137 .await
5138 .unwrap();
5139 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5140
5141 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5142 .await;
5143
5144 // Cause the worktree to start the fake language server.
5145 let _buffer = project
5146 .update(cx, |project, cx| {
5147 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5148 })
5149 .await
5150 .unwrap();
5151
5152 let mut events = subscribe(&project, cx);
5153
5154 let mut fake_server = fake_servers.next().await.unwrap();
5155 fake_server.start_progress(progress_token).await;
5156 assert_eq!(
5157 events.next().await.unwrap(),
5158 Event::DiskBasedDiagnosticsStarted
5159 );
5160
5161 fake_server.start_progress(progress_token).await;
5162 fake_server.end_progress(progress_token).await;
5163 fake_server.start_progress(progress_token).await;
5164
5165 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5166 lsp::PublishDiagnosticsParams {
5167 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5168 version: None,
5169 diagnostics: vec![lsp::Diagnostic {
5170 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5171 severity: Some(lsp::DiagnosticSeverity::ERROR),
5172 message: "undefined variable 'A'".to_string(),
5173 ..Default::default()
5174 }],
5175 },
5176 );
5177 assert_eq!(
5178 events.next().await.unwrap(),
5179 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5180 );
5181
5182 fake_server.end_progress(progress_token).await;
5183 fake_server.end_progress(progress_token).await;
5184 assert_eq!(
5185 events.next().await.unwrap(),
5186 Event::DiskBasedDiagnosticsUpdated
5187 );
5188 assert_eq!(
5189 events.next().await.unwrap(),
5190 Event::DiskBasedDiagnosticsFinished
5191 );
5192
5193 let buffer = project
5194 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5195 .await
5196 .unwrap();
5197
5198 buffer.read_with(cx, |buffer, _| {
5199 let snapshot = buffer.snapshot();
5200 let diagnostics = snapshot
5201 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5202 .collect::<Vec<_>>();
5203 assert_eq!(
5204 diagnostics,
5205 &[DiagnosticEntry {
5206 range: Point::new(0, 9)..Point::new(0, 10),
5207 diagnostic: Diagnostic {
5208 severity: lsp::DiagnosticSeverity::ERROR,
5209 message: "undefined variable 'A'".to_string(),
5210 group_id: 0,
5211 is_primary: true,
5212 ..Default::default()
5213 }
5214 }]
5215 )
5216 });
5217 }
5218
5219 #[gpui::test]
5220 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5221 cx.foreground().forbid_parking();
5222
5223 let mut language = Language::new(
5224 LanguageConfig {
5225 name: "Rust".into(),
5226 path_suffixes: vec!["rs".to_string()],
5227 ..Default::default()
5228 },
5229 Some(tree_sitter_rust::language()),
5230 );
5231 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5232 disk_based_diagnostics_sources: &["disk"],
5233 ..Default::default()
5234 });
5235
5236 let text = "
5237 fn a() { A }
5238 fn b() { BB }
5239 fn c() { CCC }
5240 "
5241 .unindent();
5242
5243 let fs = FakeFs::new(cx.background());
5244 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5245
5246 let project = Project::test(fs, cx);
5247 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5248
5249 let worktree_id = project
5250 .update(cx, |project, cx| {
5251 project.find_or_create_local_worktree("/dir", true, cx)
5252 })
5253 .await
5254 .unwrap()
5255 .0
5256 .read_with(cx, |tree, _| tree.id());
5257
5258 let buffer = project
5259 .update(cx, |project, cx| {
5260 project.open_buffer((worktree_id, "a.rs"), cx)
5261 })
5262 .await
5263 .unwrap();
5264
5265 let mut fake_server = fake_servers.next().await.unwrap();
5266 let open_notification = fake_server
5267 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5268 .await;
5269
5270 // Edit the buffer, moving the content down
5271 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5272 let change_notification_1 = fake_server
5273 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5274 .await;
5275 assert!(
5276 change_notification_1.text_document.version > open_notification.text_document.version
5277 );
5278
5279 // Report some diagnostics for the initial version of the buffer
5280 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5281 lsp::PublishDiagnosticsParams {
5282 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5283 version: Some(open_notification.text_document.version),
5284 diagnostics: vec![
5285 lsp::Diagnostic {
5286 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5287 severity: Some(DiagnosticSeverity::ERROR),
5288 message: "undefined variable 'A'".to_string(),
5289 source: Some("disk".to_string()),
5290 ..Default::default()
5291 },
5292 lsp::Diagnostic {
5293 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5294 severity: Some(DiagnosticSeverity::ERROR),
5295 message: "undefined variable 'BB'".to_string(),
5296 source: Some("disk".to_string()),
5297 ..Default::default()
5298 },
5299 lsp::Diagnostic {
5300 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5301 severity: Some(DiagnosticSeverity::ERROR),
5302 source: Some("disk".to_string()),
5303 message: "undefined variable 'CCC'".to_string(),
5304 ..Default::default()
5305 },
5306 ],
5307 },
5308 );
5309
5310 // The diagnostics have moved down since they were created.
5311 buffer.next_notification(cx).await;
5312 buffer.read_with(cx, |buffer, _| {
5313 assert_eq!(
5314 buffer
5315 .snapshot()
5316 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5317 .collect::<Vec<_>>(),
5318 &[
5319 DiagnosticEntry {
5320 range: Point::new(3, 9)..Point::new(3, 11),
5321 diagnostic: Diagnostic {
5322 severity: DiagnosticSeverity::ERROR,
5323 message: "undefined variable 'BB'".to_string(),
5324 is_disk_based: true,
5325 group_id: 1,
5326 is_primary: true,
5327 ..Default::default()
5328 },
5329 },
5330 DiagnosticEntry {
5331 range: Point::new(4, 9)..Point::new(4, 12),
5332 diagnostic: Diagnostic {
5333 severity: DiagnosticSeverity::ERROR,
5334 message: "undefined variable 'CCC'".to_string(),
5335 is_disk_based: true,
5336 group_id: 2,
5337 is_primary: true,
5338 ..Default::default()
5339 }
5340 }
5341 ]
5342 );
5343 assert_eq!(
5344 chunks_with_diagnostics(buffer, 0..buffer.len()),
5345 [
5346 ("\n\nfn a() { ".to_string(), None),
5347 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5348 (" }\nfn b() { ".to_string(), None),
5349 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5350 (" }\nfn c() { ".to_string(), None),
5351 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5352 (" }\n".to_string(), None),
5353 ]
5354 );
5355 assert_eq!(
5356 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5357 [
5358 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5359 (" }\nfn c() { ".to_string(), None),
5360 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5361 ]
5362 );
5363 });
5364
5365 // Ensure overlapping diagnostics are highlighted correctly.
5366 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5367 lsp::PublishDiagnosticsParams {
5368 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5369 version: Some(open_notification.text_document.version),
5370 diagnostics: vec![
5371 lsp::Diagnostic {
5372 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5373 severity: Some(DiagnosticSeverity::ERROR),
5374 message: "undefined variable 'A'".to_string(),
5375 source: Some("disk".to_string()),
5376 ..Default::default()
5377 },
5378 lsp::Diagnostic {
5379 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5380 severity: Some(DiagnosticSeverity::WARNING),
5381 message: "unreachable statement".to_string(),
5382 source: Some("disk".to_string()),
5383 ..Default::default()
5384 },
5385 ],
5386 },
5387 );
5388
5389 buffer.next_notification(cx).await;
5390 buffer.read_with(cx, |buffer, _| {
5391 assert_eq!(
5392 buffer
5393 .snapshot()
5394 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5395 .collect::<Vec<_>>(),
5396 &[
5397 DiagnosticEntry {
5398 range: Point::new(2, 9)..Point::new(2, 12),
5399 diagnostic: Diagnostic {
5400 severity: DiagnosticSeverity::WARNING,
5401 message: "unreachable statement".to_string(),
5402 is_disk_based: true,
5403 group_id: 1,
5404 is_primary: true,
5405 ..Default::default()
5406 }
5407 },
5408 DiagnosticEntry {
5409 range: Point::new(2, 9)..Point::new(2, 10),
5410 diagnostic: Diagnostic {
5411 severity: DiagnosticSeverity::ERROR,
5412 message: "undefined variable 'A'".to_string(),
5413 is_disk_based: true,
5414 group_id: 0,
5415 is_primary: true,
5416 ..Default::default()
5417 },
5418 }
5419 ]
5420 );
5421 assert_eq!(
5422 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5423 [
5424 ("fn a() { ".to_string(), None),
5425 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5426 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5427 ("\n".to_string(), None),
5428 ]
5429 );
5430 assert_eq!(
5431 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5432 [
5433 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5434 ("\n".to_string(), None),
5435 ]
5436 );
5437 });
5438
5439 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5440 // changes since the last save.
5441 buffer.update(cx, |buffer, cx| {
5442 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5443 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5444 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5445 });
5446 let change_notification_2 = fake_server
5447 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5448 .await;
5449 assert!(
5450 change_notification_2.text_document.version
5451 > change_notification_1.text_document.version
5452 );
5453
5454 // Handle out-of-order diagnostics
5455 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5456 lsp::PublishDiagnosticsParams {
5457 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5458 version: Some(change_notification_2.text_document.version),
5459 diagnostics: vec![
5460 lsp::Diagnostic {
5461 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5462 severity: Some(DiagnosticSeverity::ERROR),
5463 message: "undefined variable 'BB'".to_string(),
5464 source: Some("disk".to_string()),
5465 ..Default::default()
5466 },
5467 lsp::Diagnostic {
5468 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5469 severity: Some(DiagnosticSeverity::WARNING),
5470 message: "undefined variable 'A'".to_string(),
5471 source: Some("disk".to_string()),
5472 ..Default::default()
5473 },
5474 ],
5475 },
5476 );
5477
5478 buffer.next_notification(cx).await;
5479 buffer.read_with(cx, |buffer, _| {
5480 assert_eq!(
5481 buffer
5482 .snapshot()
5483 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5484 .collect::<Vec<_>>(),
5485 &[
5486 DiagnosticEntry {
5487 range: Point::new(2, 21)..Point::new(2, 22),
5488 diagnostic: Diagnostic {
5489 severity: DiagnosticSeverity::WARNING,
5490 message: "undefined variable 'A'".to_string(),
5491 is_disk_based: true,
5492 group_id: 1,
5493 is_primary: true,
5494 ..Default::default()
5495 }
5496 },
5497 DiagnosticEntry {
5498 range: Point::new(3, 9)..Point::new(3, 14),
5499 diagnostic: Diagnostic {
5500 severity: DiagnosticSeverity::ERROR,
5501 message: "undefined variable 'BB'".to_string(),
5502 is_disk_based: true,
5503 group_id: 0,
5504 is_primary: true,
5505 ..Default::default()
5506 },
5507 }
5508 ]
5509 );
5510 });
5511 }
5512
5513 #[gpui::test]
5514 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5515 cx.foreground().forbid_parking();
5516
5517 let text = concat!(
5518 "let one = ;\n", //
5519 "let two = \n",
5520 "let three = 3;\n",
5521 );
5522
5523 let fs = FakeFs::new(cx.background());
5524 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5525
5526 let project = Project::test(fs, cx);
5527 let worktree_id = project
5528 .update(cx, |project, cx| {
5529 project.find_or_create_local_worktree("/dir", true, cx)
5530 })
5531 .await
5532 .unwrap()
5533 .0
5534 .read_with(cx, |tree, _| tree.id());
5535
5536 let buffer = project
5537 .update(cx, |project, cx| {
5538 project.open_buffer((worktree_id, "a.rs"), cx)
5539 })
5540 .await
5541 .unwrap();
5542
5543 project.update(cx, |project, cx| {
5544 project
5545 .update_buffer_diagnostics(
5546 &buffer,
5547 vec![
5548 DiagnosticEntry {
5549 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5550 diagnostic: Diagnostic {
5551 severity: DiagnosticSeverity::ERROR,
5552 message: "syntax error 1".to_string(),
5553 ..Default::default()
5554 },
5555 },
5556 DiagnosticEntry {
5557 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5558 diagnostic: Diagnostic {
5559 severity: DiagnosticSeverity::ERROR,
5560 message: "syntax error 2".to_string(),
5561 ..Default::default()
5562 },
5563 },
5564 ],
5565 None,
5566 cx,
5567 )
5568 .unwrap();
5569 });
5570
5571 // An empty range is extended forward to include the following character.
5572 // At the end of a line, an empty range is extended backward to include
5573 // the preceding character.
5574 buffer.read_with(cx, |buffer, _| {
5575 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5576 assert_eq!(
5577 chunks
5578 .iter()
5579 .map(|(s, d)| (s.as_str(), *d))
5580 .collect::<Vec<_>>(),
5581 &[
5582 ("let one = ", None),
5583 (";", Some(DiagnosticSeverity::ERROR)),
5584 ("\nlet two =", None),
5585 (" ", Some(DiagnosticSeverity::ERROR)),
5586 ("\nlet three = 3;\n", None)
5587 ]
5588 );
5589 });
5590 }
5591
5592 #[gpui::test]
5593 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5594 cx.foreground().forbid_parking();
5595
5596 let mut language = Language::new(
5597 LanguageConfig {
5598 name: "Rust".into(),
5599 path_suffixes: vec!["rs".to_string()],
5600 ..Default::default()
5601 },
5602 Some(tree_sitter_rust::language()),
5603 );
5604 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5605
5606 let text = "
5607 fn a() {
5608 f1();
5609 }
5610 fn b() {
5611 f2();
5612 }
5613 fn c() {
5614 f3();
5615 }
5616 "
5617 .unindent();
5618
5619 let fs = FakeFs::new(cx.background());
5620 fs.insert_tree(
5621 "/dir",
5622 json!({
5623 "a.rs": text.clone(),
5624 }),
5625 )
5626 .await;
5627
5628 let project = Project::test(fs, cx);
5629 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5630
5631 let worktree_id = project
5632 .update(cx, |project, cx| {
5633 project.find_or_create_local_worktree("/dir", true, cx)
5634 })
5635 .await
5636 .unwrap()
5637 .0
5638 .read_with(cx, |tree, _| tree.id());
5639
5640 let buffer = project
5641 .update(cx, |project, cx| {
5642 project.open_buffer((worktree_id, "a.rs"), cx)
5643 })
5644 .await
5645 .unwrap();
5646
5647 let mut fake_server = fake_servers.next().await.unwrap();
5648 let lsp_document_version = fake_server
5649 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5650 .await
5651 .text_document
5652 .version;
5653
5654 // Simulate editing the buffer after the language server computes some edits.
5655 buffer.update(cx, |buffer, cx| {
5656 buffer.edit(
5657 [Point::new(0, 0)..Point::new(0, 0)],
5658 "// above first function\n",
5659 cx,
5660 );
5661 buffer.edit(
5662 [Point::new(2, 0)..Point::new(2, 0)],
5663 " // inside first function\n",
5664 cx,
5665 );
5666 buffer.edit(
5667 [Point::new(6, 4)..Point::new(6, 4)],
5668 "// inside second function ",
5669 cx,
5670 );
5671
5672 assert_eq!(
5673 buffer.text(),
5674 "
5675 // above first function
5676 fn a() {
5677 // inside first function
5678 f1();
5679 }
5680 fn b() {
5681 // inside second function f2();
5682 }
5683 fn c() {
5684 f3();
5685 }
5686 "
5687 .unindent()
5688 );
5689 });
5690
5691 let edits = project
5692 .update(cx, |project, cx| {
5693 project.edits_from_lsp(
5694 &buffer,
5695 vec![
5696 // replace body of first function
5697 lsp::TextEdit {
5698 range: lsp::Range::new(
5699 lsp::Position::new(0, 0),
5700 lsp::Position::new(3, 0),
5701 ),
5702 new_text: "
5703 fn a() {
5704 f10();
5705 }
5706 "
5707 .unindent(),
5708 },
5709 // edit inside second function
5710 lsp::TextEdit {
5711 range: lsp::Range::new(
5712 lsp::Position::new(4, 6),
5713 lsp::Position::new(4, 6),
5714 ),
5715 new_text: "00".into(),
5716 },
5717 // edit inside third function via two distinct edits
5718 lsp::TextEdit {
5719 range: lsp::Range::new(
5720 lsp::Position::new(7, 5),
5721 lsp::Position::new(7, 5),
5722 ),
5723 new_text: "4000".into(),
5724 },
5725 lsp::TextEdit {
5726 range: lsp::Range::new(
5727 lsp::Position::new(7, 5),
5728 lsp::Position::new(7, 6),
5729 ),
5730 new_text: "".into(),
5731 },
5732 ],
5733 Some(lsp_document_version),
5734 cx,
5735 )
5736 })
5737 .await
5738 .unwrap();
5739
5740 buffer.update(cx, |buffer, cx| {
5741 for (range, new_text) in edits {
5742 buffer.edit([range], new_text, cx);
5743 }
5744 assert_eq!(
5745 buffer.text(),
5746 "
5747 // above first function
5748 fn a() {
5749 // inside first function
5750 f10();
5751 }
5752 fn b() {
5753 // inside second function f200();
5754 }
5755 fn c() {
5756 f4000();
5757 }
5758 "
5759 .unindent()
5760 );
5761 });
5762 }
5763
5764 #[gpui::test]
5765 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5766 cx.foreground().forbid_parking();
5767
5768 let text = "
5769 use a::b;
5770 use a::c;
5771
5772 fn f() {
5773 b();
5774 c();
5775 }
5776 "
5777 .unindent();
5778
5779 let fs = FakeFs::new(cx.background());
5780 fs.insert_tree(
5781 "/dir",
5782 json!({
5783 "a.rs": text.clone(),
5784 }),
5785 )
5786 .await;
5787
5788 let project = Project::test(fs, cx);
5789 let worktree_id = project
5790 .update(cx, |project, cx| {
5791 project.find_or_create_local_worktree("/dir", true, cx)
5792 })
5793 .await
5794 .unwrap()
5795 .0
5796 .read_with(cx, |tree, _| tree.id());
5797
5798 let buffer = project
5799 .update(cx, |project, cx| {
5800 project.open_buffer((worktree_id, "a.rs"), cx)
5801 })
5802 .await
5803 .unwrap();
5804
5805 // Simulate the language server sending us a small edit in the form of a very large diff.
5806 // Rust-analyzer does this when performing a merge-imports code action.
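        // `edits_from_lsp` should diff the replaced text against the new text
        // and boil all of this down to two minimal edits, as asserted below.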
5807 let edits = project
5808 .update(cx, |project, cx| {
5809 project.edits_from_lsp(
5810 &buffer,
5811 [
5812 // Replace the first use statement without editing the semicolon.
5813 lsp::TextEdit {
5814 range: lsp::Range::new(
5815 lsp::Position::new(0, 4),
5816 lsp::Position::new(0, 8),
5817 ),
5818 new_text: "a::{b, c}".into(),
5819 },
5820 // Reinsert the remainder of the file between the semicolon and the final
5821 // newline of the file.
5822 lsp::TextEdit {
5823 range: lsp::Range::new(
5824 lsp::Position::new(0, 9),
5825 lsp::Position::new(0, 9),
5826 ),
5827 new_text: "\n\n".into(),
5828 },
5829 lsp::TextEdit {
5830 range: lsp::Range::new(
5831 lsp::Position::new(0, 9),
5832 lsp::Position::new(0, 9),
5833 ),
5834 new_text: "
5835 fn f() {
5836 b();
5837 c();
5838 }"
5839 .unindent(),
5840 },
5841 // Delete everything after the first newline of the file.
5842 lsp::TextEdit {
5843 range: lsp::Range::new(
5844 lsp::Position::new(1, 0),
5845 lsp::Position::new(7, 0),
5846 ),
5847 new_text: "".into(),
5848 },
5849 ],
5850 None,
5851 cx,
5852 )
5853 })
5854 .await
5855 .unwrap();
5856
5857 buffer.update(cx, |buffer, cx| {
5858 let edits = edits
5859 .into_iter()
5860 .map(|(range, text)| {
5861 (
5862 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5863 text,
5864 )
5865 })
5866 .collect::<Vec<_>>();
5867
5868 assert_eq!(
5869 edits,
5870 [
5871 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5872 (Point::new(1, 0)..Point::new(2, 0), "".into())
5873 ]
5874 );
5875
5876 for (range, new_text) in edits {
5877 buffer.edit([range], new_text, cx);
5878 }
5879 assert_eq!(
5880 buffer.text(),
5881 "
5882 use a::{b, c};
5883
5884 fn f() {
5885 b();
5886 c();
5887 }
5888 "
5889 .unindent()
5890 );
5891 });
5892 }
5893
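    // Collects the buffer's chunks over `range`, merging adjacent chunks that
    // share the same diagnostic severity into (text, severity) pairs.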
5894 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5895 buffer: &Buffer,
5896 range: Range<T>,
5897 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5898 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5899 for chunk in buffer.snapshot().chunks(range, true) {
5900 if chunks.last().map_or(false, |prev_chunk| {
5901 prev_chunk.1 == chunk.diagnostic_severity
5902 }) {
5903 chunks.last_mut().unwrap().0.push_str(chunk.text);
5904 } else {
5905 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5906 }
5907 }
5908 chunks
5909 }
5910
5911 #[gpui::test]
5912 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5913 let dir = temp_tree(json!({
5914 "root": {
5915 "dir1": {},
5916 "dir2": {
5917 "dir3": {}
5918 }
5919 }
5920 }));
5921
5922 let project = Project::test(Arc::new(RealFs), cx);
5923 let (tree, _) = project
5924 .update(cx, |project, cx| {
5925 project.find_or_create_local_worktree(&dir.path(), true, cx)
5926 })
5927 .await
5928 .unwrap();
5929
5930 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5931 .await;
5932
5933 let cancel_flag = Default::default();
5934 let results = project
5935 .read_with(cx, |project, cx| {
5936 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5937 })
5938 .await;
5939
5940 assert!(results.is_empty());
5941 }
5942
5943 #[gpui::test]
5944 async fn test_definition(cx: &mut gpui::TestAppContext) {
5945 let mut language = Language::new(
5946 LanguageConfig {
5947 name: "Rust".into(),
5948 path_suffixes: vec!["rs".to_string()],
5949 ..Default::default()
5950 },
5951 Some(tree_sitter_rust::language()),
5952 );
5953 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5954
5955 let fs = FakeFs::new(cx.background());
5956 fs.insert_tree(
5957 "/dir",
5958 json!({
5959 "a.rs": "const fn a() { A }",
5960 "b.rs": "const y: i32 = crate::a()",
5961 }),
5962 )
5963 .await;
5964
5965 let project = Project::test(fs, cx);
5966 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5967
5968 let (tree, _) = project
5969 .update(cx, |project, cx| {
5970 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5971 })
5972 .await
5973 .unwrap();
5974 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5975 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5976 .await;
5977
5978 let buffer = project
5979 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
5980 .await
5981 .unwrap();
5982
5983 let fake_server = fake_servers.next().await.unwrap();
5984 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
5985 let params = params.text_document_position_params;
5986 assert_eq!(
5987 params.text_document.uri.to_file_path().unwrap(),
5988 Path::new("/dir/b.rs"),
5989 );
5990 assert_eq!(params.position, lsp::Position::new(0, 22));
5991
5992 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
5993 lsp::Location::new(
5994 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5995 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5996 ),
5997 )))
5998 });
5999
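        // The definition lives in a file outside of the project, so resolving
        // it should add a non-visible worktree for "/dir/a.rs" that goes away
        // again once the definition (and its buffer) is dropped.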
6000 let mut definitions = project
6001 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6002 .await
6003 .unwrap();
6004
6005 assert_eq!(definitions.len(), 1);
6006 let definition = definitions.pop().unwrap();
6007 cx.update(|cx| {
6008 let target_buffer = definition.buffer.read(cx);
6009 assert_eq!(
6010 target_buffer
6011 .file()
6012 .unwrap()
6013 .as_local()
6014 .unwrap()
6015 .abs_path(cx),
6016 Path::new("/dir/a.rs"),
6017 );
6018 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6019 assert_eq!(
6020 list_worktrees(&project, cx),
6021 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6022 );
6023
6024 drop(definition);
6025 });
6026 cx.read(|cx| {
6027 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6028 });
6029
6030 fn list_worktrees<'a>(
6031 project: &'a ModelHandle<Project>,
6032 cx: &'a AppContext,
6033 ) -> Vec<(&'a Path, bool)> {
6034 project
6035 .read(cx)
6036 .worktrees(cx)
6037 .map(|worktree| {
6038 let worktree = worktree.read(cx);
6039 (
6040 worktree.as_local().unwrap().abs_path().as_ref(),
6041 worktree.is_visible(),
6042 )
6043 })
6044 .collect::<Vec<_>>()
6045 }
6046 }
6047
6048 #[gpui::test(iterations = 100)]
6049 async fn test_apply_code_action(cx: &mut gpui::TestAppContext) {
6050 let mut language = Language::new(
6051 LanguageConfig {
6052 name: "TypeScript".into(),
6053 path_suffixes: vec!["ts".to_string()],
6054 ..Default::default()
6055 },
6056 None,
6057 );
6058 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6059
6060 let fs = FakeFs::new(cx.background());
6061 fs.insert_tree(
6062 "/dir",
6063 json!({
6064 "a.ts": "a",
6065 }),
6066 )
6067 .await;
6068
6069 let project = Project::test(fs, cx);
6070 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6071
6072 let (tree, _) = project
6073 .update(cx, |project, cx| {
6074 project.find_or_create_local_worktree("/dir", true, cx)
6075 })
6076 .await
6077 .unwrap();
6078 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6079 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6080 .await;
6081
6082 let buffer = project
6083 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6084 .await
6085 .unwrap();
6086
6087 let fake_server = fake_language_servers.next().await.unwrap();
6088
6089 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6090 fake_server
6091 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6092 Ok(Some(vec![
6093 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6094 title: "The code action".into(),
6095 command: Some(lsp::Command {
6096 title: "The command".into(),
6097 command: "_the/command".into(),
6098 arguments: Some(vec![json!("the-argument")]),
6099 }),
6100 ..Default::default()
6101 }),
6102 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6103 title: "two".into(),
6104 ..Default::default()
6105 }),
6106 ]))
6107 })
6108 .next()
6109 .await;
6110
6111 let action = actions.await.unwrap()[0].clone();
6112 let apply = project.update(cx, |project, cx| {
6113 project.apply_code_action(buffer.clone(), action, true, cx)
6114 });
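        // The fake server resolves the action unchanged. Executing the action's
        // command then makes the server send a workspace edit back to the
        // client, which should be reflected in the transaction returned by
        // `apply_code_action`.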
6115 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6116 |action, _| async move { Ok(action) },
6117 );
6118 fake_server
6119 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6120 let fake = fake_server.clone();
6121 move |params, _| {
6122 assert_eq!(params.command, "_the/command");
6123 let fake = fake.clone();
6124 async move {
6125 fake.server
6126 .request::<lsp::request::ApplyWorkspaceEdit>(
6127 lsp::ApplyWorkspaceEditParams {
6128 label: None,
6129 edit: lsp::WorkspaceEdit {
6130 changes: Some(
6131 [(
6132 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6133 vec![lsp::TextEdit {
6134 range: lsp::Range::new(
6135 lsp::Position::new(0, 0),
6136 lsp::Position::new(0, 0),
6137 ),
6138 new_text: "X".into(),
6139 }],
6140 )]
6141 .into_iter()
6142 .collect(),
6143 ),
6144 ..Default::default()
6145 },
6146 },
6147 )
6148 .await
6149 .unwrap();
6150 Ok(Some(json!(null)))
6151 }
6152 }
6153 })
6154 .next()
6155 .await;
6156
6157 let transaction = apply.await.unwrap();
6158 assert!(transaction.0.contains_key(&buffer));
6159 buffer.update(cx, |buffer, cx| {
6160 assert_eq!(buffer.text(), "Xa");
6161 buffer.undo(cx);
6162 assert_eq!(buffer.text(), "a");
6163 });
6164 }
6165
6166 #[gpui::test]
6167 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6168 let fs = FakeFs::new(cx.background());
6169 fs.insert_tree(
6170 "/dir",
6171 json!({
6172 "file1": "the old contents",
6173 }),
6174 )
6175 .await;
6176
6177 let project = Project::test(fs.clone(), cx);
6178 let worktree_id = project
6179 .update(cx, |p, cx| {
6180 p.find_or_create_local_worktree("/dir", true, cx)
6181 })
6182 .await
6183 .unwrap()
6184 .0
6185 .read_with(cx, |tree, _| tree.id());
6186
6187 let buffer = project
6188 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6189 .await
6190 .unwrap();
6191 buffer
6192 .update(cx, |buffer, cx| {
6193 assert_eq!(buffer.text(), "the old contents");
6194 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6195 buffer.save(cx)
6196 })
6197 .await
6198 .unwrap();
6199
6200 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6201 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6202 }
6203
6204 #[gpui::test]
6205 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6206 let fs = FakeFs::new(cx.background());
6207 fs.insert_tree(
6208 "/dir",
6209 json!({
6210 "file1": "the old contents",
6211 }),
6212 )
6213 .await;
6214
6215 let project = Project::test(fs.clone(), cx);
6216 let worktree_id = project
6217 .update(cx, |p, cx| {
6218 p.find_or_create_local_worktree("/dir/file1", true, cx)
6219 })
6220 .await
6221 .unwrap()
6222 .0
6223 .read_with(cx, |tree, _| tree.id());
6224
6225 let buffer = project
6226 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6227 .await
6228 .unwrap();
6229 buffer
6230 .update(cx, |buffer, cx| {
6231 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6232 buffer.save(cx)
6233 })
6234 .await
6235 .unwrap();
6236
6237 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6238 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6239 }
6240
6241 #[gpui::test]
6242 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6243 let fs = FakeFs::new(cx.background());
6244 fs.insert_tree("/dir", json!({})).await;
6245
6246 let project = Project::test(fs.clone(), cx);
6247 let (worktree, _) = project
6248 .update(cx, |project, cx| {
6249 project.find_or_create_local_worktree("/dir", true, cx)
6250 })
6251 .await
6252 .unwrap();
6253 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6254
6255 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6256 buffer.update(cx, |buffer, cx| {
6257 buffer.edit([0..0], "abc", cx);
6258 assert!(buffer.is_dirty());
6259 assert!(!buffer.has_conflict());
6260 });
6261 project
6262 .update(cx, |project, cx| {
6263 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6264 })
6265 .await
6266 .unwrap();
6267 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6268 buffer.read_with(cx, |buffer, cx| {
6269 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6270 assert!(!buffer.is_dirty());
6271 assert!(!buffer.has_conflict());
6272 });
6273
6274 let opened_buffer = project
6275 .update(cx, |project, cx| {
6276 project.open_buffer((worktree_id, "file1"), cx)
6277 })
6278 .await
6279 .unwrap();
6280 assert_eq!(opened_buffer, buffer);
6281 }
6282
6283 #[gpui::test(retries = 5)]
6284 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6285 let dir = temp_tree(json!({
6286 "a": {
6287 "file1": "",
6288 "file2": "",
6289 "file3": "",
6290 },
6291 "b": {
6292 "c": {
6293 "file4": "",
6294 "file5": "",
6295 }
6296 }
6297 }));
6298
6299 let project = Project::test(Arc::new(RealFs), cx);
6300 let rpc = project.read_with(cx, |p, _| p.client.clone());
6301
6302 let (tree, _) = project
6303 .update(cx, |p, cx| {
6304 p.find_or_create_local_worktree(dir.path(), true, cx)
6305 })
6306 .await
6307 .unwrap();
6308 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6309
6310 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6311 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6312 async move { buffer.await.unwrap() }
6313 };
6314 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6315 tree.read_with(cx, |tree, _| {
6316 tree.entry_for_path(path)
6317 .expect(&format!("no entry for path {}", path))
6318 .id
6319 })
6320 };
6321
6322 let buffer2 = buffer_for_path("a/file2", cx).await;
6323 let buffer3 = buffer_for_path("a/file3", cx).await;
6324 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6325 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6326
6327 let file2_id = id_for_path("a/file2", &cx);
6328 let file3_id = id_for_path("a/file3", &cx);
6329 let file4_id = id_for_path("b/c/file4", &cx);
6330
6331 // Wait for the initial scan.
6332 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6333 .await;
6334
6335 // Create a remote copy of this worktree.
6336 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6337 let (remote, load_task) = cx.update(|cx| {
6338 Worktree::remote(
6339 1,
6340 1,
6341 initial_snapshot.to_proto(&Default::default(), true),
6342 rpc.clone(),
6343 cx,
6344 )
6345 });
6346 load_task.await;
6347
6348 cx.read(|cx| {
6349 assert!(!buffer2.read(cx).is_dirty());
6350 assert!(!buffer3.read(cx).is_dirty());
6351 assert!(!buffer4.read(cx).is_dirty());
6352 assert!(!buffer5.read(cx).is_dirty());
6353 });
6354
6355 // Rename and delete files and directories.
6356 tree.flush_fs_events(&cx).await;
6357 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6358 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6359 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6360 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6361 tree.flush_fs_events(&cx).await;
6362
6363 let expected_paths = vec![
6364 "a",
6365 "a/file1",
6366 "a/file2.new",
6367 "b",
6368 "d",
6369 "d/file3",
6370 "d/file4",
6371 ];
6372
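        // After the rescan, entry ids should be preserved across the renames,
        // the open buffers should point at their new paths, and the buffer for
        // the deleted file should be marked as deleted.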
6373 cx.read(|app| {
6374 assert_eq!(
6375 tree.read(app)
6376 .paths()
6377 .map(|p| p.to_str().unwrap())
6378 .collect::<Vec<_>>(),
6379 expected_paths
6380 );
6381
6382 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6383 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6384 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6385
6386 assert_eq!(
6387 buffer2.read(app).file().unwrap().path().as_ref(),
6388 Path::new("a/file2.new")
6389 );
6390 assert_eq!(
6391 buffer3.read(app).file().unwrap().path().as_ref(),
6392 Path::new("d/file3")
6393 );
6394 assert_eq!(
6395 buffer4.read(app).file().unwrap().path().as_ref(),
6396 Path::new("d/file4")
6397 );
6398 assert_eq!(
6399 buffer5.read(app).file().unwrap().path().as_ref(),
6400 Path::new("b/c/file5")
6401 );
6402
6403 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6404 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6405 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6406 assert!(buffer5.read(app).file().unwrap().is_deleted());
6407 });
6408
6409 // Update the remote worktree. Check that it becomes consistent with the
6410 // local worktree.
6411 remote.update(cx, |remote, cx| {
6412 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6413 &initial_snapshot,
6414 1,
6415 1,
6416 true,
6417 );
6418 remote
6419 .as_remote_mut()
6420 .unwrap()
6421 .snapshot
6422 .apply_remote_update(update_message)
6423 .unwrap();
6424
6425 assert_eq!(
6426 remote
6427 .paths()
6428 .map(|p| p.to_str().unwrap())
6429 .collect::<Vec<_>>(),
6430 expected_paths
6431 );
6432 });
6433 }
6434
6435 #[gpui::test]
6436 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6437 let fs = FakeFs::new(cx.background());
6438 fs.insert_tree(
6439 "/the-dir",
6440 json!({
6441 "a.txt": "a-contents",
6442 "b.txt": "b-contents",
6443 }),
6444 )
6445 .await;
6446
6447 let project = Project::test(fs.clone(), cx);
6448 let worktree_id = project
6449 .update(cx, |p, cx| {
6450 p.find_or_create_local_worktree("/the-dir", true, cx)
6451 })
6452 .await
6453 .unwrap()
6454 .0
6455 .read_with(cx, |tree, _| tree.id());
6456
6457 // Spawn multiple tasks to open paths, repeating some paths.
6458 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6459 (
6460 p.open_buffer((worktree_id, "a.txt"), cx),
6461 p.open_buffer((worktree_id, "b.txt"), cx),
6462 p.open_buffer((worktree_id, "a.txt"), cx),
6463 )
6464 });
6465
6466 let buffer_a_1 = buffer_a_1.await.unwrap();
6467 let buffer_a_2 = buffer_a_2.await.unwrap();
6468 let buffer_b = buffer_b.await.unwrap();
6469 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6470 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6471
6472 // There is only one buffer per path.
6473 let buffer_a_id = buffer_a_1.id();
6474 assert_eq!(buffer_a_2.id(), buffer_a_id);
6475
6476 // Open the same path again while it is still open.
6477 drop(buffer_a_1);
6478 let buffer_a_3 = project
6479 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6480 .await
6481 .unwrap();
6482
6483 // There's still only one buffer per path.
6484 assert_eq!(buffer_a_3.id(), buffer_a_id);
6485 }
6486
6487 #[gpui::test]
6488 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6489 use std::fs;
6490
6491 let dir = temp_tree(json!({
6492 "file1": "abc",
6493 "file2": "def",
6494 "file3": "ghi",
6495 }));
6496
6497 let project = Project::test(Arc::new(RealFs), cx);
6498 let (worktree, _) = project
6499 .update(cx, |p, cx| {
6500 p.find_or_create_local_worktree(dir.path(), true, cx)
6501 })
6502 .await
6503 .unwrap();
6504 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6505
6506 worktree.flush_fs_events(&cx).await;
6507 worktree
6508 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6509 .await;
6510
6511 let buffer1 = project
6512 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6513 .await
6514 .unwrap();
6515 let events = Rc::new(RefCell::new(Vec::new()));
6516
        // Initially, the buffer isn't dirty.
6518 buffer1.update(cx, |buffer, cx| {
6519 cx.subscribe(&buffer1, {
6520 let events = events.clone();
6521 move |_, _, event, _| match event {
6522 BufferEvent::Operation(_) => {}
6523 _ => events.borrow_mut().push(event.clone()),
6524 }
6525 })
6526 .detach();
6527
6528 assert!(!buffer.is_dirty());
6529 assert!(events.borrow().is_empty());
6530
6531 buffer.edit(vec![1..2], "", cx);
6532 });
6533
        // After the first edit, the buffer is dirty, and emits a dirtied event.
6535 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6537 assert!(buffer.is_dirty());
6538 assert_eq!(
6539 *events.borrow(),
6540 &[language::Event::Edited, language::Event::Dirtied]
6541 );
6542 events.borrow_mut().clear();
6543 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6544 });
6545
        // After saving, the buffer is not dirty, and emits a saved event.
6547 buffer1.update(cx, |buffer, cx| {
6548 assert!(!buffer.is_dirty());
6549 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6550 events.borrow_mut().clear();
6551
6552 buffer.edit(vec![1..1], "B", cx);
6553 buffer.edit(vec![2..2], "D", cx);
6554 });
6555
        // After editing again, the buffer is dirty, and emits another dirtied event.
6557 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6559 assert!(buffer.is_dirty());
6560 assert_eq!(
6561 *events.borrow(),
6562 &[
6563 language::Event::Edited,
6564 language::Event::Dirtied,
6565 language::Event::Edited,
6566 ],
6567 );
6568 events.borrow_mut().clear();
6569
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
6572 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
6574 assert!(buffer.is_dirty());
6575 });
6576
6577 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6578
6579 // When a file is deleted, the buffer is considered dirty.
6580 let events = Rc::new(RefCell::new(Vec::new()));
6581 let buffer2 = project
6582 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6583 .await
6584 .unwrap();
6585 buffer2.update(cx, |_, cx| {
6586 cx.subscribe(&buffer2, {
6587 let events = events.clone();
6588 move |_, _, event, _| events.borrow_mut().push(event.clone())
6589 })
6590 .detach();
6591 });
6592
6593 fs::remove_file(dir.path().join("file2")).unwrap();
6594 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6595 assert_eq!(
6596 *events.borrow(),
6597 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6598 );
6599
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6601 let events = Rc::new(RefCell::new(Vec::new()));
6602 let buffer3 = project
6603 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6604 .await
6605 .unwrap();
6606 buffer3.update(cx, |_, cx| {
6607 cx.subscribe(&buffer3, {
6608 let events = events.clone();
6609 move |_, _, event, _| events.borrow_mut().push(event.clone())
6610 })
6611 .detach();
6612 });
6613
6614 worktree.flush_fs_events(&cx).await;
6615 buffer3.update(cx, |buffer, cx| {
6616 buffer.edit(Some(0..0), "x", cx);
6617 });
6618 events.borrow_mut().clear();
6619 fs::remove_file(dir.path().join("file3")).unwrap();
6620 buffer3
6621 .condition(&cx, |_, _| !events.borrow().is_empty())
6622 .await;
6623 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6624 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6625 }
6626
6627 #[gpui::test]
6628 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6629 use std::fs;
6630
6631 let initial_contents = "aaa\nbbbbb\nc\n";
6632 let dir = temp_tree(json!({ "the-file": initial_contents }));
6633
6634 let project = Project::test(Arc::new(RealFs), cx);
6635 let (worktree, _) = project
6636 .update(cx, |p, cx| {
6637 p.find_or_create_local_worktree(dir.path(), true, cx)
6638 })
6639 .await
6640 .unwrap();
6641 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6642
6643 worktree
6644 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6645 .await;
6646
6647 let abs_path = dir.path().join("the-file");
6648 let buffer = project
6649 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6650 .await
6651 .unwrap();
6652
6653 // TODO
6654 // Add a cursor on each row.
6655 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6656 // assert!(!buffer.is_dirty());
6657 // buffer.add_selection_set(
6658 // &(0..3)
6659 // .map(|row| Selection {
6660 // id: row as usize,
6661 // start: Point::new(row, 1),
6662 // end: Point::new(row, 1),
6663 // reversed: false,
6664 // goal: SelectionGoal::None,
6665 // })
6666 // .collect::<Vec<_>>(),
6667 // cx,
6668 // )
6669 // });
6670
6671 // Change the file on disk, adding two new lines of text, and removing
6672 // one line.
6673 buffer.read_with(cx, |buffer, _| {
6674 assert!(!buffer.is_dirty());
6675 assert!(!buffer.has_conflict());
6676 });
6677 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6678 fs::write(&abs_path, new_contents).unwrap();
6679
6680 // Because the buffer was not modified, it is reloaded from disk. Its
6681 // contents are edited according to the diff between the old and new
6682 // file contents.
6683 buffer
6684 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6685 .await;
6686
6687 buffer.update(cx, |buffer, _| {
6688 assert_eq!(buffer.text(), new_contents);
6689 assert!(!buffer.is_dirty());
6690 assert!(!buffer.has_conflict());
6691
6692 // TODO
6693 // let cursor_positions = buffer
6694 // .selection_set(selection_set_id)
6695 // .unwrap()
6696 // .selections::<Point>(&*buffer)
6697 // .map(|selection| {
6698 // assert_eq!(selection.start, selection.end);
6699 // selection.start
6700 // })
6701 // .collect::<Vec<_>>();
6702 // assert_eq!(
6703 // cursor_positions,
6704 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6705 // );
6706 });
6707
6708 // Modify the buffer
6709 buffer.update(cx, |buffer, cx| {
6710 buffer.edit(vec![0..0], " ", cx);
6711 assert!(buffer.is_dirty());
6712 assert!(!buffer.has_conflict());
6713 });
6714
6715 // Change the file on disk again, adding blank lines to the beginning.
6716 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6717
6718 // Because the buffer is modified, it doesn't reload from disk, but is
6719 // marked as having a conflict.
6720 buffer
6721 .condition(&cx, |buffer, _| buffer.has_conflict())
6722 .await;
6723 }
6724
6725 #[gpui::test]
6726 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6727 cx.foreground().forbid_parking();
6728
6729 let fs = FakeFs::new(cx.background());
6730 fs.insert_tree(
6731 "/the-dir",
6732 json!({
6733 "a.rs": "
6734 fn foo(mut v: Vec<usize>) {
6735 for x in &v {
6736 v.push(1);
6737 }
6738 }
6739 "
6740 .unindent(),
6741 }),
6742 )
6743 .await;
6744
6745 let project = Project::test(fs.clone(), cx);
6746 let (worktree, _) = project
6747 .update(cx, |p, cx| {
6748 p.find_or_create_local_worktree("/the-dir", true, cx)
6749 })
6750 .await
6751 .unwrap();
6752 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6753
6754 let buffer = project
6755 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6756 .await
6757 .unwrap();
6758
6759 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6760 let message = lsp::PublishDiagnosticsParams {
6761 uri: buffer_uri.clone(),
6762 diagnostics: vec![
6763 lsp::Diagnostic {
6764 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6765 severity: Some(DiagnosticSeverity::WARNING),
6766 message: "error 1".to_string(),
6767 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6768 location: lsp::Location {
6769 uri: buffer_uri.clone(),
6770 range: lsp::Range::new(
6771 lsp::Position::new(1, 8),
6772 lsp::Position::new(1, 9),
6773 ),
6774 },
6775 message: "error 1 hint 1".to_string(),
6776 }]),
6777 ..Default::default()
6778 },
6779 lsp::Diagnostic {
6780 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6781 severity: Some(DiagnosticSeverity::HINT),
6782 message: "error 1 hint 1".to_string(),
6783 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6784 location: lsp::Location {
6785 uri: buffer_uri.clone(),
6786 range: lsp::Range::new(
6787 lsp::Position::new(1, 8),
6788 lsp::Position::new(1, 9),
6789 ),
6790 },
6791 message: "original diagnostic".to_string(),
6792 }]),
6793 ..Default::default()
6794 },
6795 lsp::Diagnostic {
6796 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6797 severity: Some(DiagnosticSeverity::ERROR),
6798 message: "error 2".to_string(),
6799 related_information: Some(vec![
6800 lsp::DiagnosticRelatedInformation {
6801 location: lsp::Location {
6802 uri: buffer_uri.clone(),
6803 range: lsp::Range::new(
6804 lsp::Position::new(1, 13),
6805 lsp::Position::new(1, 15),
6806 ),
6807 },
6808 message: "error 2 hint 1".to_string(),
6809 },
6810 lsp::DiagnosticRelatedInformation {
6811 location: lsp::Location {
6812 uri: buffer_uri.clone(),
6813 range: lsp::Range::new(
6814 lsp::Position::new(1, 13),
6815 lsp::Position::new(1, 15),
6816 ),
6817 },
6818 message: "error 2 hint 2".to_string(),
6819 },
6820 ]),
6821 ..Default::default()
6822 },
6823 lsp::Diagnostic {
6824 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6825 severity: Some(DiagnosticSeverity::HINT),
6826 message: "error 2 hint 1".to_string(),
6827 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6828 location: lsp::Location {
6829 uri: buffer_uri.clone(),
6830 range: lsp::Range::new(
6831 lsp::Position::new(2, 8),
6832 lsp::Position::new(2, 17),
6833 ),
6834 },
6835 message: "original diagnostic".to_string(),
6836 }]),
6837 ..Default::default()
6838 },
6839 lsp::Diagnostic {
6840 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6841 severity: Some(DiagnosticSeverity::HINT),
6842 message: "error 2 hint 2".to_string(),
6843 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6844 location: lsp::Location {
6845 uri: buffer_uri.clone(),
6846 range: lsp::Range::new(
6847 lsp::Position::new(2, 8),
6848 lsp::Position::new(2, 17),
6849 ),
6850 },
6851 message: "original diagnostic".to_string(),
6852 }]),
6853 ..Default::default()
6854 },
6855 ],
6856 version: None,
6857 };
6858
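        // Each primary diagnostic and the hints derived from its related
        // information should end up in the same group: "error 1" and its hint
        // in group 0, "error 2" and its two hints in group 1.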
6859 project
6860 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
6861 .unwrap();
6862 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6863
6864 assert_eq!(
6865 buffer
6866 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6867 .collect::<Vec<_>>(),
6868 &[
6869 DiagnosticEntry {
6870 range: Point::new(1, 8)..Point::new(1, 9),
6871 diagnostic: Diagnostic {
6872 severity: DiagnosticSeverity::WARNING,
6873 message: "error 1".to_string(),
6874 group_id: 0,
6875 is_primary: true,
6876 ..Default::default()
6877 }
6878 },
6879 DiagnosticEntry {
6880 range: Point::new(1, 8)..Point::new(1, 9),
6881 diagnostic: Diagnostic {
6882 severity: DiagnosticSeverity::HINT,
6883 message: "error 1 hint 1".to_string(),
6884 group_id: 0,
6885 is_primary: false,
6886 ..Default::default()
6887 }
6888 },
6889 DiagnosticEntry {
6890 range: Point::new(1, 13)..Point::new(1, 15),
6891 diagnostic: Diagnostic {
6892 severity: DiagnosticSeverity::HINT,
6893 message: "error 2 hint 1".to_string(),
6894 group_id: 1,
6895 is_primary: false,
6896 ..Default::default()
6897 }
6898 },
6899 DiagnosticEntry {
6900 range: Point::new(1, 13)..Point::new(1, 15),
6901 diagnostic: Diagnostic {
6902 severity: DiagnosticSeverity::HINT,
6903 message: "error 2 hint 2".to_string(),
6904 group_id: 1,
6905 is_primary: false,
6906 ..Default::default()
6907 }
6908 },
6909 DiagnosticEntry {
6910 range: Point::new(2, 8)..Point::new(2, 17),
6911 diagnostic: Diagnostic {
6912 severity: DiagnosticSeverity::ERROR,
6913 message: "error 2".to_string(),
6914 group_id: 1,
6915 is_primary: true,
6916 ..Default::default()
6917 }
6918 }
6919 ]
6920 );
6921
6922 assert_eq!(
6923 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6924 &[
6925 DiagnosticEntry {
6926 range: Point::new(1, 8)..Point::new(1, 9),
6927 diagnostic: Diagnostic {
6928 severity: DiagnosticSeverity::WARNING,
6929 message: "error 1".to_string(),
6930 group_id: 0,
6931 is_primary: true,
6932 ..Default::default()
6933 }
6934 },
6935 DiagnosticEntry {
6936 range: Point::new(1, 8)..Point::new(1, 9),
6937 diagnostic: Diagnostic {
6938 severity: DiagnosticSeverity::HINT,
6939 message: "error 1 hint 1".to_string(),
6940 group_id: 0,
6941 is_primary: false,
6942 ..Default::default()
6943 }
6944 },
6945 ]
6946 );
6947 assert_eq!(
6948 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6949 &[
6950 DiagnosticEntry {
6951 range: Point::new(1, 13)..Point::new(1, 15),
6952 diagnostic: Diagnostic {
6953 severity: DiagnosticSeverity::HINT,
6954 message: "error 2 hint 1".to_string(),
6955 group_id: 1,
6956 is_primary: false,
6957 ..Default::default()
6958 }
6959 },
6960 DiagnosticEntry {
6961 range: Point::new(1, 13)..Point::new(1, 15),
6962 diagnostic: Diagnostic {
6963 severity: DiagnosticSeverity::HINT,
6964 message: "error 2 hint 2".to_string(),
6965 group_id: 1,
6966 is_primary: false,
6967 ..Default::default()
6968 }
6969 },
6970 DiagnosticEntry {
6971 range: Point::new(2, 8)..Point::new(2, 17),
6972 diagnostic: Diagnostic {
6973 severity: DiagnosticSeverity::ERROR,
6974 message: "error 2".to_string(),
6975 group_id: 1,
6976 is_primary: true,
6977 ..Default::default()
6978 }
6979 }
6980 ]
6981 );
6982 }
6983
6984 #[gpui::test]
6985 async fn test_rename(cx: &mut gpui::TestAppContext) {
6986 cx.foreground().forbid_parking();
6987
6988 let mut language = Language::new(
6989 LanguageConfig {
6990 name: "Rust".into(),
6991 path_suffixes: vec!["rs".to_string()],
6992 ..Default::default()
6993 },
6994 Some(tree_sitter_rust::language()),
6995 );
6996 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6997
6998 let fs = FakeFs::new(cx.background());
6999 fs.insert_tree(
7000 "/dir",
7001 json!({
7002 "one.rs": "const ONE: usize = 1;",
7003 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7004 }),
7005 )
7006 .await;
7007
7008 let project = Project::test(fs.clone(), cx);
7009 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7010
7011 let (tree, _) = project
7012 .update(cx, |project, cx| {
7013 project.find_or_create_local_worktree("/dir", true, cx)
7014 })
7015 .await
7016 .unwrap();
7017 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7018 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7019 .await;
7020
7021 let buffer = project
7022 .update(cx, |project, cx| {
7023 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7024 })
7025 .await
7026 .unwrap();
7027
7028 let fake_server = fake_servers.next().await.unwrap();
7029
7030 let response = project.update(cx, |project, cx| {
7031 project.prepare_rename(buffer.clone(), 7, cx)
7032 });
7033 fake_server
7034 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7035 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7036 assert_eq!(params.position, lsp::Position::new(0, 7));
7037 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7038 lsp::Position::new(0, 6),
7039 lsp::Position::new(0, 9),
7040 ))))
7041 })
7042 .next()
7043 .await
7044 .unwrap();
7045 let range = response.await.unwrap().unwrap();
7046 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7047 assert_eq!(range, 6..9);
7048
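        // Now perform the rename. The workspace edit touches both files, so the
        // returned transaction should contain an entry for each affected buffer.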
7049 let response = project.update(cx, |project, cx| {
7050 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7051 });
7052 fake_server
7053 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7054 assert_eq!(
7055 params.text_document_position.text_document.uri.as_str(),
7056 "file:///dir/one.rs"
7057 );
7058 assert_eq!(
7059 params.text_document_position.position,
7060 lsp::Position::new(0, 7)
7061 );
7062 assert_eq!(params.new_name, "THREE");
7063 Ok(Some(lsp::WorkspaceEdit {
7064 changes: Some(
7065 [
7066 (
7067 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7068 vec![lsp::TextEdit::new(
7069 lsp::Range::new(
7070 lsp::Position::new(0, 6),
7071 lsp::Position::new(0, 9),
7072 ),
7073 "THREE".to_string(),
7074 )],
7075 ),
7076 (
7077 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7078 vec![
7079 lsp::TextEdit::new(
7080 lsp::Range::new(
7081 lsp::Position::new(0, 24),
7082 lsp::Position::new(0, 27),
7083 ),
7084 "THREE".to_string(),
7085 ),
7086 lsp::TextEdit::new(
7087 lsp::Range::new(
7088 lsp::Position::new(0, 35),
7089 lsp::Position::new(0, 38),
7090 ),
7091 "THREE".to_string(),
7092 ),
7093 ],
7094 ),
7095 ]
7096 .into_iter()
7097 .collect(),
7098 ),
7099 ..Default::default()
7100 }))
7101 })
7102 .next()
7103 .await
7104 .unwrap();
7105 let mut transaction = response.await.unwrap().0;
7106 assert_eq!(transaction.len(), 2);
7107 assert_eq!(
7108 transaction
7109 .remove_entry(&buffer)
7110 .unwrap()
7111 .0
7112 .read_with(cx, |buffer, _| buffer.text()),
7113 "const THREE: usize = 1;"
7114 );
7115 assert_eq!(
7116 transaction
7117 .into_keys()
7118 .next()
7119 .unwrap()
7120 .read_with(cx, |buffer, _| buffer.text()),
7121 "const TWO: usize = one::THREE + one::THREE;"
7122 );
7123 }
7124
7125 #[gpui::test]
7126 async fn test_search(cx: &mut gpui::TestAppContext) {
7127 let fs = FakeFs::new(cx.background());
7128 fs.insert_tree(
7129 "/dir",
7130 json!({
7131 "one.rs": "const ONE: usize = 1;",
7132 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7133 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7134 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7135 }),
7136 )
7137 .await;
7138 let project = Project::test(fs.clone(), cx);
7139 let (tree, _) = project
7140 .update(cx, |project, cx| {
7141 project.find_or_create_local_worktree("/dir", true, cx)
7142 })
7143 .await
7144 .unwrap();
7145 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7146 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7147 .await;
7148
7149 assert_eq!(
7150 search(&project, SearchQuery::text("TWO", false, true), cx)
7151 .await
7152 .unwrap(),
7153 HashMap::from_iter([
7154 ("two.rs".to_string(), vec![6..9]),
7155 ("three.rs".to_string(), vec![37..40])
7156 ])
7157 );
7158
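        // Edit one of the files in memory without saving it. Subsequent
        // searches should reflect the open buffer's contents rather than what
        // is on disk.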
7159 let buffer_4 = project
7160 .update(cx, |project, cx| {
7161 project.open_buffer((worktree_id, "four.rs"), cx)
7162 })
7163 .await
7164 .unwrap();
7165 buffer_4.update(cx, |buffer, cx| {
7166 buffer.edit([20..28, 31..43], "two::TWO", cx);
7167 });
7168
7169 assert_eq!(
7170 search(&project, SearchQuery::text("TWO", false, true), cx)
7171 .await
7172 .unwrap(),
7173 HashMap::from_iter([
7174 ("two.rs".to_string(), vec![6..9]),
7175 ("three.rs".to_string(), vec![37..40]),
7176 ("four.rs".to_string(), vec![25..28, 36..39])
7177 ])
7178 );
7179
7180 async fn search(
7181 project: &ModelHandle<Project>,
7182 query: SearchQuery,
7183 cx: &mut gpui::TestAppContext,
7184 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7185 let results = project
7186 .update(cx, |project, cx| project.search(query, cx))
7187 .await?;
7188
7189 Ok(results
7190 .into_iter()
7191 .map(|(buffer, ranges)| {
7192 buffer.read_with(cx, |buffer, _| {
7193 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7194 let ranges = ranges
7195 .into_iter()
7196 .map(|range| range.to_offset(buffer))
7197 .collect::<Vec<_>>();
7198 (path, ranges)
7199 })
7200 })
7201 .collect())
7202 }
7203 }
7204}