1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
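/// Identifies a file within the project by its worktree plus a worktree-relative path.
///
/// A minimal construction sketch; the `worktree_id` would normally come from an existing
/// worktree (e.g. `worktree.read(cx).id()`), and the literal path is illustrative only:
///
/// ```ignore
/// use std::{path::Path, sync::Arc};
///
/// let project_path = ProjectPath {
///     worktree_id,
///     path: Arc::from(Path::new("src/main.rs")),
/// };
/// ```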
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
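// `DiagnosticSummary` derives `Default`, so an empty summary can be constructed directly
// and converted for the wire with `to_proto`. A small usage sketch (the path literal is
// illustrative only):
//
//     let summary = DiagnosticSummary::default();
//     assert!(summary.is_empty());
//     let message = summary.to_proto(std::path::Path::new("src/lib.rs"));
//     assert_eq!(message.error_count, 0);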
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
224#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
225pub struct ProjectEntryId(usize);
226
227impl ProjectEntryId {
228 pub fn new(counter: &AtomicUsize) -> Self {
229 Self(counter.fetch_add(1, SeqCst))
230 }
231
232 pub fn from_proto(id: u64) -> Self {
233 Self(id as usize)
234 }
235
236 pub fn to_proto(&self) -> u64 {
237 self.0 as u64
238 }
239
240 pub fn to_usize(&self) -> usize {
241 self.0
242 }
243}
244
245impl Project {
246 pub fn init(client: &Arc<Client>) {
247 client.add_model_message_handler(Self::handle_add_collaborator);
248 client.add_model_message_handler(Self::handle_buffer_reloaded);
249 client.add_model_message_handler(Self::handle_buffer_saved);
250 client.add_model_message_handler(Self::handle_start_language_server);
251 client.add_model_message_handler(Self::handle_update_language_server);
252 client.add_model_message_handler(Self::handle_remove_collaborator);
253 client.add_model_message_handler(Self::handle_register_worktree);
254 client.add_model_message_handler(Self::handle_unregister_worktree);
255 client.add_model_message_handler(Self::handle_unshare_project);
256 client.add_model_message_handler(Self::handle_update_buffer_file);
257 client.add_model_message_handler(Self::handle_update_buffer);
258 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
259 client.add_model_message_handler(Self::handle_update_worktree);
260 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
261 client.add_model_request_handler(Self::handle_apply_code_action);
262 client.add_model_request_handler(Self::handle_reload_buffers);
263 client.add_model_request_handler(Self::handle_format_buffers);
264 client.add_model_request_handler(Self::handle_get_code_actions);
265 client.add_model_request_handler(Self::handle_get_completions);
266 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
269 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
271 client.add_model_request_handler(Self::handle_search_project);
272 client.add_model_request_handler(Self::handle_get_project_symbols);
273 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
274 client.add_model_request_handler(Self::handle_open_buffer_by_id);
275 client.add_model_request_handler(Self::handle_open_buffer_by_path);
276 client.add_model_request_handler(Self::handle_save_buffer);
277 }
278
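    /// Creates a project that operates on the local filesystem.
    ///
    /// A hedged construction sketch, assuming a `client`, `user_store`, `languages`
    /// registry, and `fs` implementation are already available (in tests,
    /// `Project::test` below wraps this same call):
    ///
    /// ```ignore
    /// let project = cx.update(|cx| {
    ///     Project::local(client.clone(), user_store.clone(), languages.clone(), fs.clone(), cx)
    /// });
    /// ```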
279 pub fn local(
280 client: Arc<Client>,
281 user_store: ModelHandle<UserStore>,
282 languages: Arc<LanguageRegistry>,
283 fs: Arc<dyn Fs>,
284 cx: &mut MutableAppContext,
285 ) -> ModelHandle<Self> {
286 cx.add_model(|cx: &mut ModelContext<Self>| {
287 let (remote_id_tx, remote_id_rx) = watch::channel();
288 let _maintain_remote_id_task = cx.spawn_weak({
289 let rpc = client.clone();
290 move |this, mut cx| {
291 async move {
292 let mut status = rpc.status();
293 while let Some(status) = status.next().await {
294 if let Some(this) = this.upgrade(&cx) {
295 if status.is_connected() {
296 this.update(&mut cx, |this, cx| this.register(cx)).await?;
297 } else {
298 this.update(&mut cx, |this, cx| this.unregister(cx));
299 }
300 }
301 }
302 Ok(())
303 }
304 .log_err()
305 }
306 });
307
308 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
309 Self {
310 worktrees: Default::default(),
311 collaborators: Default::default(),
312 opened_buffers: Default::default(),
313 shared_buffers: Default::default(),
314 loading_buffers: Default::default(),
315 loading_local_worktrees: Default::default(),
316 buffer_snapshots: Default::default(),
317 client_state: ProjectClientState::Local {
318 is_shared: false,
319 remote_id_tx,
320 remote_id_rx,
321 _maintain_remote_id_task,
322 },
323 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
324 subscriptions: Vec::new(),
325 active_entry: None,
326 languages,
327 client,
328 user_store,
329 fs,
330 next_entry_id: Default::default(),
331 language_servers: Default::default(),
332 started_language_servers: Default::default(),
333 language_server_statuses: Default::default(),
334 last_workspace_edits_by_language_server: Default::default(),
335 language_server_settings: Default::default(),
336 next_language_server_id: 0,
337 nonce: StdRng::from_entropy().gen(),
338 }
339 })
340 }
341
342 pub async fn remote(
343 remote_id: u64,
344 client: Arc<Client>,
345 user_store: ModelHandle<UserStore>,
346 languages: Arc<LanguageRegistry>,
347 fs: Arc<dyn Fs>,
348 cx: &mut AsyncAppContext,
349 ) -> Result<ModelHandle<Self>> {
350 client.authenticate_and_connect(true, &cx).await?;
351
352 let response = client
353 .request(proto::JoinProject {
354 project_id: remote_id,
355 })
356 .await?;
357
358 let replica_id = response.replica_id as ReplicaId;
359
360 let mut worktrees = Vec::new();
361 for worktree in response.worktrees {
362 let (worktree, load_task) = cx
363 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
364 worktrees.push(worktree);
365 load_task.detach();
366 }
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
370 let mut this = Self {
371 worktrees: Vec::new(),
372 loading_buffers: Default::default(),
373 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
374 shared_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 active_entry: None,
377 collaborators: Default::default(),
378 languages,
379 user_store: user_store.clone(),
380 fs,
381 next_entry_id: Default::default(),
382 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
383 client: client.clone(),
384 client_state: ProjectClientState::Remote {
385 sharing_has_stopped: false,
386 remote_id,
387 replica_id,
388 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
389 async move {
390 let mut status = client.status();
391 let is_connected =
392 status.next().await.map_or(false, |s| s.is_connected());
393 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
394 if !is_connected || status.next().await.is_some() {
395 if let Some(this) = this.upgrade(&cx) {
396 this.update(&mut cx, |this, cx| this.project_unshared(cx))
397 }
398 }
399 Ok(())
400 }
401 .log_err()
402 }),
403 },
404 language_servers: Default::default(),
405 started_language_servers: Default::default(),
406 language_server_settings: Default::default(),
407 language_server_statuses: response
408 .language_servers
409 .into_iter()
410 .map(|server| {
411 (
412 server.id as usize,
413 LanguageServerStatus {
414 name: server.name,
415 pending_work: Default::default(),
416 pending_diagnostic_updates: 0,
417 },
418 )
419 })
420 .collect(),
421 last_workspace_edits_by_language_server: Default::default(),
422 next_language_server_id: 0,
423 opened_buffers: Default::default(),
424 buffer_snapshots: Default::default(),
425 nonce: StdRng::from_entropy().gen(),
426 };
427 for worktree in worktrees {
428 this.add_worktree(&worktree, cx);
429 }
430 this
431 });
432
433 let user_ids = response
434 .collaborators
435 .iter()
436 .map(|peer| peer.user_id)
437 .collect();
438 user_store
439 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
440 .await?;
441 let mut collaborators = HashMap::default();
442 for message in response.collaborators {
443 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
444 collaborators.insert(collaborator.peer_id, collaborator);
445 }
446
447 this.update(cx, |this, _| {
448 this.collaborators = collaborators;
449 });
450
451 Ok(this)
452 }
453
454 #[cfg(any(test, feature = "test-support"))]
455 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
456 let languages = Arc::new(LanguageRegistry::test());
457 let http_client = client::test::FakeHttpClient::with_404_response();
458 let client = client::Client::new(http_client.clone());
459 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
460 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
461 }
462
463 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
464 self.opened_buffers
465 .get(&remote_id)
466 .and_then(|buffer| buffer.upgrade(cx))
467 }
468
469 pub fn languages(&self) -> &Arc<LanguageRegistry> {
470 &self.languages
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn check_invariants(&self, cx: &AppContext) {
475 if self.is_local() {
476 let mut worktree_root_paths = HashMap::default();
477 for worktree in self.worktrees(cx) {
478 let worktree = worktree.read(cx);
479 let abs_path = worktree.as_local().unwrap().abs_path().clone();
480 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
481 assert_eq!(
482 prev_worktree_id,
483 None,
484 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
485 abs_path,
486 worktree.id(),
487 prev_worktree_id
488 )
489 }
490 } else {
491 let replica_id = self.replica_id();
492 for buffer in self.opened_buffers.values() {
493 if let Some(buffer) = buffer.upgrade(cx) {
494 let buffer = buffer.read(cx);
495 assert_eq!(
496 buffer.deferred_ops_len(),
497 0,
498 "replica {}, buffer {} has deferred operations",
499 replica_id,
500 buffer.remote_id()
501 );
502 }
503 }
504 }
505 }
506
507 #[cfg(any(test, feature = "test-support"))]
508 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
509 let path = path.into();
510 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
511 self.opened_buffers.iter().any(|(_, buffer)| {
512 if let Some(buffer) = buffer.upgrade(cx) {
513 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
514 if file.worktree == worktree && file.path() == &path.path {
515 return true;
516 }
517 }
518 }
519 false
520 })
521 } else {
522 false
523 }
524 }
525
526 pub fn fs(&self) -> &Arc<dyn Fs> {
527 &self.fs
528 }
529
530 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
531 self.unshare(cx);
532 for worktree in &self.worktrees {
533 if let Some(worktree) = worktree.upgrade(cx) {
534 worktree.update(cx, |worktree, _| {
535 worktree.as_local_mut().unwrap().unregister();
536 });
537 }
538 }
539
540 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
541 *remote_id_tx.borrow_mut() = None;
542 }
543
544 self.subscriptions.clear();
545 }
546
547 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
548 self.unregister(cx);
549
550 let response = self.client.request(proto::RegisterProject {});
551 cx.spawn(|this, mut cx| async move {
552 let remote_id = response.await?.project_id;
553
554 let mut registrations = Vec::new();
555 this.update(&mut cx, |this, cx| {
556 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
557 *remote_id_tx.borrow_mut() = Some(remote_id);
558 }
559
560 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
561
562 this.subscriptions
563 .push(this.client.add_model_for_remote_entity(remote_id, cx));
564
565 for worktree in &this.worktrees {
566 if let Some(worktree) = worktree.upgrade(cx) {
567 registrations.push(worktree.update(cx, |worktree, cx| {
568 let worktree = worktree.as_local_mut().unwrap();
569 worktree.register(remote_id, cx)
570 }));
571 }
572 }
573 });
574
575 futures::future::try_join_all(registrations).await?;
576 Ok(())
577 })
578 }
579
580 pub fn remote_id(&self) -> Option<u64> {
581 match &self.client_state {
582 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
583 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
584 }
585 }
586
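    /// Resolves to the project's remote id, waiting until registration assigns one
    /// if the project is local and not yet registered.
    ///
    /// Usage sketch, assuming an async context holding a handle to the project:
    ///
    /// ```ignore
    /// let remote_id = project
    ///     .read_with(&cx, |project, _| project.next_remote_id())
    ///     .await;
    /// ```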
587 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
588 let mut id = None;
589 let mut watch = None;
590 match &self.client_state {
591 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
592 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
593 }
594
595 async move {
596 if let Some(id) = id {
597 return id;
598 }
599 let mut watch = watch.unwrap();
600 loop {
601 let id = *watch.borrow();
602 if let Some(id) = id {
603 return id;
604 }
605 watch.next().await;
606 }
607 }
608 }
609
610 pub fn replica_id(&self) -> ReplicaId {
611 match &self.client_state {
612 ProjectClientState::Local { .. } => 0,
613 ProjectClientState::Remote { replica_id, .. } => *replica_id,
614 }
615 }
616
617 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
618 &self.collaborators
619 }
620
621 pub fn worktrees<'a>(
622 &'a self,
623 cx: &'a AppContext,
624 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
625 self.worktrees
626 .iter()
627 .filter_map(move |worktree| worktree.upgrade(cx))
628 }
629
630 pub fn visible_worktrees<'a>(
631 &'a self,
632 cx: &'a AppContext,
633 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
634 self.worktrees.iter().filter_map(|worktree| {
635 worktree.upgrade(cx).and_then(|worktree| {
636 if worktree.read(cx).is_visible() {
637 Some(worktree)
638 } else {
639 None
640 }
641 })
642 })
643 }
644
645 pub fn worktree_for_id(
646 &self,
647 id: WorktreeId,
648 cx: &AppContext,
649 ) -> Option<ModelHandle<Worktree>> {
650 self.worktrees(cx)
651 .find(|worktree| worktree.read(cx).id() == id)
652 }
653
654 pub fn worktree_for_entry(
655 &self,
656 entry_id: ProjectEntryId,
657 cx: &AppContext,
658 ) -> Option<ModelHandle<Worktree>> {
659 self.worktrees(cx)
660 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
661 }
662
663 pub fn worktree_id_for_entry(
664 &self,
665 entry_id: ProjectEntryId,
666 cx: &AppContext,
667 ) -> Option<WorktreeId> {
668 self.worktree_for_entry(entry_id, cx)
669 .map(|worktree| worktree.read(cx).id())
670 }
671
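    /// Starts sharing this local project with collaborators, upgrading weak buffer and
    /// worktree handles so they stay alive for the duration of the share.
    ///
    /// A minimal sketch, assuming the project has already been registered and therefore
    /// has a remote id:
    ///
    /// ```ignore
    /// project
    ///     .update(&mut cx, |project, cx| project.share(cx))
    ///     .await?;
    /// ```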
672 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
673 let rpc = self.client.clone();
674 cx.spawn(|this, mut cx| async move {
675 let project_id = this.update(&mut cx, |this, cx| {
676 if let ProjectClientState::Local {
677 is_shared,
678 remote_id_rx,
679 ..
680 } = &mut this.client_state
681 {
682 *is_shared = true;
683
684 for open_buffer in this.opened_buffers.values_mut() {
685 match open_buffer {
686 OpenBuffer::Strong(_) => {}
687 OpenBuffer::Weak(buffer) => {
688 if let Some(buffer) = buffer.upgrade(cx) {
689 *open_buffer = OpenBuffer::Strong(buffer);
690 }
691 }
692 OpenBuffer::Loading(_) => unreachable!(),
693 }
694 }
695
696 for worktree_handle in this.worktrees.iter_mut() {
697 match worktree_handle {
698 WorktreeHandle::Strong(_) => {}
699 WorktreeHandle::Weak(worktree) => {
700 if let Some(worktree) = worktree.upgrade(cx) {
701 *worktree_handle = WorktreeHandle::Strong(worktree);
702 }
703 }
704 }
705 }
706
707 remote_id_rx
708 .borrow()
709 .ok_or_else(|| anyhow!("no project id"))
710 } else {
711 Err(anyhow!("can't share a remote project"))
712 }
713 })?;
714
715 rpc.request(proto::ShareProject { project_id }).await?;
716
717 let mut tasks = Vec::new();
718 this.update(&mut cx, |this, cx| {
719 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
720 worktree.update(cx, |worktree, cx| {
721 let worktree = worktree.as_local_mut().unwrap();
722 tasks.push(worktree.share(project_id, cx));
723 });
724 }
725 });
726 for task in tasks {
727 task.await?;
728 }
729 this.update(&mut cx, |_, cx| cx.notify());
730 Ok(())
731 })
732 }
733
734 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
735 let rpc = self.client.clone();
736
737 if let ProjectClientState::Local {
738 is_shared,
739 remote_id_rx,
740 ..
741 } = &mut self.client_state
742 {
743 if !*is_shared {
744 return;
745 }
746
747 *is_shared = false;
748 self.collaborators.clear();
749 self.shared_buffers.clear();
750 for worktree_handle in self.worktrees.iter_mut() {
751 if let WorktreeHandle::Strong(worktree) = worktree_handle {
752 let is_visible = worktree.update(cx, |worktree, _| {
753 worktree.as_local_mut().unwrap().unshare();
754 worktree.is_visible()
755 });
756 if !is_visible {
757 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
758 }
759 }
760 }
761
762 for open_buffer in self.opened_buffers.values_mut() {
763 match open_buffer {
764 OpenBuffer::Strong(buffer) => {
765 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
766 }
767 _ => {}
768 }
769 }
770
771 if let Some(project_id) = *remote_id_rx.borrow() {
772 rpc.send(proto::UnshareProject { project_id }).log_err();
773 }
774
775 cx.notify();
776 } else {
777 log::error!("attempted to unshare a remote project");
778 }
779 }
780
781 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
782 if let ProjectClientState::Remote {
783 sharing_has_stopped,
784 ..
785 } = &mut self.client_state
786 {
787 *sharing_has_stopped = true;
788 self.collaborators.clear();
789 cx.notify();
790 }
791 }
792
793 pub fn is_read_only(&self) -> bool {
794 match &self.client_state {
795 ProjectClientState::Local { .. } => false,
796 ProjectClientState::Remote {
797 sharing_has_stopped,
798 ..
799 } => *sharing_has_stopped,
800 }
801 }
802
803 pub fn is_local(&self) -> bool {
804 match &self.client_state {
805 ProjectClientState::Local { .. } => true,
806 ProjectClientState::Remote { .. } => false,
807 }
808 }
809
810 pub fn is_remote(&self) -> bool {
811 !self.is_local()
812 }
813
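    /// Creates an in-memory buffer that is not yet backed by a file. Only supported on
    /// local projects.
    ///
    /// Sketch (passing `None` falls back to the plain-text language; `Some(language)`
    /// would override it):
    ///
    /// ```ignore
    /// let buffer = project.update(cx, |project, cx| {
    ///     project.create_buffer("// scratch", None, cx)
    /// })?;
    /// ```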
814 pub fn create_buffer(
815 &mut self,
816 text: &str,
817 language: Option<Arc<Language>>,
818 cx: &mut ModelContext<Self>,
819 ) -> Result<ModelHandle<Buffer>> {
820 if self.is_remote() {
821 return Err(anyhow!("creating buffers as a guest is not supported yet"));
822 }
823
824 let buffer = cx.add_model(|cx| {
825 Buffer::new(self.replica_id(), text, cx)
826 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
827 });
828 self.register_buffer(&buffer, cx)?;
829 Ok(buffer)
830 }
831
832 pub fn open_path(
833 &mut self,
834 path: impl Into<ProjectPath>,
835 cx: &mut ModelContext<Self>,
836 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
837 let task = self.open_buffer(path, cx);
838 cx.spawn_weak(|_, cx| async move {
839 let buffer = task.await?;
840 let project_entry_id = buffer
841 .read_with(&cx, |buffer, cx| {
842 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
843 })
844 .ok_or_else(|| anyhow!("no project entry"))?;
845 Ok((project_entry_id, buffer.into()))
846 })
847 }
848
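    /// Opens a buffer for the given project path, returning an existing buffer if one is
    /// already open and deduplicating concurrent loads of the same path.
    ///
    /// Usage sketch, assuming `worktree_id` refers to one of the project's worktrees and
    /// the path is worktree-relative:
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Arc::from(Path::new("src/main.rs")),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// let buffer = open_task.await?;
    /// ```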
849 pub fn open_buffer(
850 &mut self,
851 path: impl Into<ProjectPath>,
852 cx: &mut ModelContext<Self>,
853 ) -> Task<Result<ModelHandle<Buffer>>> {
854 let project_path = path.into();
855 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
856 worktree
857 } else {
858 return Task::ready(Err(anyhow!("no such worktree")));
859 };
860
861 // If there is already a buffer for the given path, then return it.
862 let existing_buffer = self.get_open_buffer(&project_path, cx);
863 if let Some(existing_buffer) = existing_buffer {
864 return Task::ready(Ok(existing_buffer));
865 }
866
867 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
868 // If the given path is already being loaded, then wait for that existing
869 // task to complete and return the same buffer.
870 hash_map::Entry::Occupied(e) => e.get().clone(),
871
872 // Otherwise, record the fact that this path is now being loaded.
873 hash_map::Entry::Vacant(entry) => {
874 let (mut tx, rx) = postage::watch::channel();
875 entry.insert(rx.clone());
876
877 let load_buffer = if worktree.read(cx).is_local() {
878 self.open_local_buffer(&project_path.path, &worktree, cx)
879 } else {
880 self.open_remote_buffer(&project_path.path, &worktree, cx)
881 };
882
883 cx.spawn(move |this, mut cx| async move {
884 let load_result = load_buffer.await;
885 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
886 // Record the fact that the buffer is no longer loading.
887 this.loading_buffers.remove(&project_path);
888 let buffer = load_result.map_err(Arc::new)?;
889 Ok(buffer)
890 }));
891 })
892 .detach();
893 rx
894 }
895 };
896
897 cx.foreground().spawn(async move {
898 loop {
899 if let Some(result) = loading_watch.borrow().as_ref() {
900 match result {
901 Ok(buffer) => return Ok(buffer.clone()),
902 Err(error) => return Err(anyhow!("{}", error)),
903 }
904 }
905 loading_watch.next().await;
906 }
907 })
908 }
909
910 fn open_local_buffer(
911 &mut self,
912 path: &Arc<Path>,
913 worktree: &ModelHandle<Worktree>,
914 cx: &mut ModelContext<Self>,
915 ) -> Task<Result<ModelHandle<Buffer>>> {
916 let load_buffer = worktree.update(cx, |worktree, cx| {
917 let worktree = worktree.as_local_mut().unwrap();
918 worktree.load_buffer(path, cx)
919 });
920 cx.spawn(|this, mut cx| async move {
921 let buffer = load_buffer.await?;
922 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
923 Ok(buffer)
924 })
925 }
926
927 fn open_remote_buffer(
928 &mut self,
929 path: &Arc<Path>,
930 worktree: &ModelHandle<Worktree>,
931 cx: &mut ModelContext<Self>,
932 ) -> Task<Result<ModelHandle<Buffer>>> {
933 let rpc = self.client.clone();
934 let project_id = self.remote_id().unwrap();
935 let remote_worktree_id = worktree.read(cx).id();
936 let path = path.clone();
937 let path_string = path.to_string_lossy().to_string();
938 cx.spawn(|this, mut cx| async move {
939 let response = rpc
940 .request(proto::OpenBufferByPath {
941 project_id,
942 worktree_id: remote_worktree_id.to_proto(),
943 path: path_string,
944 })
945 .await?;
946 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
947 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
948 .await
949 })
950 }
951
952 fn open_local_buffer_via_lsp(
953 &mut self,
954 abs_path: lsp::Url,
955 lsp_adapter: Arc<dyn LspAdapter>,
956 lsp_server: Arc<LanguageServer>,
957 cx: &mut ModelContext<Self>,
958 ) -> Task<Result<ModelHandle<Buffer>>> {
959 cx.spawn(|this, mut cx| async move {
960 let abs_path = abs_path
961 .to_file_path()
962 .map_err(|_| anyhow!("can't convert URI to path"))?;
963 let (worktree, relative_path) = if let Some(result) =
964 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
965 {
966 result
967 } else {
968 let worktree = this
969 .update(&mut cx, |this, cx| {
970 this.create_local_worktree(&abs_path, false, cx)
971 })
972 .await?;
973 this.update(&mut cx, |this, cx| {
974 this.language_servers.insert(
975 (worktree.read(cx).id(), lsp_adapter.name()),
976 (lsp_adapter, lsp_server),
977 );
978 });
979 (worktree, PathBuf::new())
980 };
981
982 let project_path = ProjectPath {
983 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
984 path: relative_path.into(),
985 };
986 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
987 .await
988 })
989 }
990
991 pub fn open_buffer_by_id(
992 &mut self,
993 id: u64,
994 cx: &mut ModelContext<Self>,
995 ) -> Task<Result<ModelHandle<Buffer>>> {
996 if let Some(buffer) = self.buffer_for_id(id, cx) {
997 Task::ready(Ok(buffer))
998 } else if self.is_local() {
999 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1000 } else if let Some(project_id) = self.remote_id() {
1001 let request = self
1002 .client
1003 .request(proto::OpenBufferById { project_id, id });
1004 cx.spawn(|this, mut cx| async move {
1005 let buffer = request
1006 .await?
1007 .buffer
1008 .ok_or_else(|| anyhow!("invalid buffer"))?;
1009 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1010 .await
1011 })
1012 } else {
1013 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1014 }
1015 }
1016
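    /// Saves a buffer under a new absolute path, creating a worktree for that path if
    /// necessary and re-registering the buffer with the matching language server.
    ///
    /// Sketch (the destination path is illustrative only):
    ///
    /// ```ignore
    /// project
    ///     .update(&mut cx, |project, cx| {
    ///         project.save_buffer_as(buffer.clone(), "/tmp/renamed.rs".into(), cx)
    ///     })
    ///     .await?;
    /// ```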
1017 pub fn save_buffer_as(
1018 &mut self,
1019 buffer: ModelHandle<Buffer>,
1020 abs_path: PathBuf,
1021 cx: &mut ModelContext<Project>,
1022 ) -> Task<Result<()>> {
1023 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1024 let old_path =
1025 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1026 cx.spawn(|this, mut cx| async move {
1027 if let Some(old_path) = old_path {
1028 this.update(&mut cx, |this, cx| {
1029 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1030 });
1031 }
1032 let (worktree, path) = worktree_task.await?;
1033 worktree
1034 .update(&mut cx, |worktree, cx| {
1035 worktree
1036 .as_local_mut()
1037 .unwrap()
1038 .save_buffer_as(buffer.clone(), path, cx)
1039 })
1040 .await?;
1041 this.update(&mut cx, |this, cx| {
1042 this.assign_language_to_buffer(&buffer, cx);
1043 this.register_buffer_with_language_server(&buffer, cx);
1044 });
1045 Ok(())
1046 })
1047 }
1048
1049 pub fn get_open_buffer(
1050 &mut self,
1051 path: &ProjectPath,
1052 cx: &mut ModelContext<Self>,
1053 ) -> Option<ModelHandle<Buffer>> {
1054 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1055 self.opened_buffers.values().find_map(|buffer| {
1056 let buffer = buffer.upgrade(cx)?;
1057 let file = File::from_dyn(buffer.read(cx).file())?;
1058 if file.worktree == worktree && file.path() == &path.path {
1059 Some(buffer)
1060 } else {
1061 None
1062 }
1063 })
1064 }
1065
1066 fn register_buffer(
1067 &mut self,
1068 buffer: &ModelHandle<Buffer>,
1069 cx: &mut ModelContext<Self>,
1070 ) -> Result<()> {
1071 let remote_id = buffer.read(cx).remote_id();
1072 let open_buffer = if self.is_remote() || self.is_shared() {
1073 OpenBuffer::Strong(buffer.clone())
1074 } else {
1075 OpenBuffer::Weak(buffer.downgrade())
1076 };
1077
1078 match self.opened_buffers.insert(remote_id, open_buffer) {
1079 None => {}
1080 Some(OpenBuffer::Loading(operations)) => {
1081 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1082 }
1083 Some(OpenBuffer::Weak(existing_handle)) => {
1084 if existing_handle.upgrade(cx).is_some() {
1085 Err(anyhow!(
1086 "already registered buffer with remote id {}",
1087 remote_id
1088 ))?
1089 }
1090 }
1091 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1092 "already registered buffer with remote id {}",
1093 remote_id
1094 ))?,
1095 }
1096 cx.subscribe(buffer, |this, buffer, event, cx| {
1097 this.on_buffer_event(buffer, event, cx);
1098 })
1099 .detach();
1100
1101 self.assign_language_to_buffer(buffer, cx);
1102 self.register_buffer_with_language_server(buffer, cx);
1103 cx.observe_release(buffer, |this, buffer, cx| {
1104 if let Some(file) = File::from_dyn(buffer.file()) {
1105 if file.is_local() {
1106 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1107 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1108 server
1109 .notify::<lsp::notification::DidCloseTextDocument>(
1110 lsp::DidCloseTextDocumentParams {
1111 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1112 },
1113 )
1114 .log_err();
1115 }
1116 }
1117 }
1118 })
1119 .detach();
1120
1121 Ok(())
1122 }
1123
1124 fn register_buffer_with_language_server(
1125 &mut self,
1126 buffer_handle: &ModelHandle<Buffer>,
1127 cx: &mut ModelContext<Self>,
1128 ) {
1129 let buffer = buffer_handle.read(cx);
1130 let buffer_id = buffer.remote_id();
1131 if let Some(file) = File::from_dyn(buffer.file()) {
1132 if file.is_local() {
1133 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1134 let initial_snapshot = buffer.text_snapshot();
1135
1136 let mut language_server = None;
1137 let mut language_id = None;
1138 if let Some(language) = buffer.language() {
1139 let worktree_id = file.worktree_id(cx);
1140 if let Some(adapter) = language.lsp_adapter() {
1141 language_id = adapter.id_for_language(language.name().as_ref());
1142 language_server = self
1143 .language_servers
1144 .get(&(worktree_id, adapter.name()))
1145 .cloned();
1146 }
1147 }
1148
1149 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1150 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1151 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1152 .log_err();
1153 }
1154 }
1155
1156 if let Some((_, server)) = language_server {
1157 server
1158 .notify::<lsp::notification::DidOpenTextDocument>(
1159 lsp::DidOpenTextDocumentParams {
1160 text_document: lsp::TextDocumentItem::new(
1161 uri,
1162 language_id.unwrap_or_default(),
1163 0,
1164 initial_snapshot.text(),
1165 ),
1166 }
1167 .clone(),
1168 )
1169 .log_err();
1170 buffer_handle.update(cx, |buffer, cx| {
1171 buffer.set_completion_triggers(
1172 server
1173 .capabilities()
1174 .completion_provider
1175 .as_ref()
1176 .and_then(|provider| provider.trigger_characters.clone())
1177 .unwrap_or(Vec::new()),
1178 cx,
1179 )
1180 });
1181 self.buffer_snapshots
1182 .insert(buffer_id, vec![(0, initial_snapshot)]);
1183 }
1184 }
1185 }
1186 }
1187
1188 fn unregister_buffer_from_language_server(
1189 &mut self,
1190 buffer: &ModelHandle<Buffer>,
1191 old_path: PathBuf,
1192 cx: &mut ModelContext<Self>,
1193 ) {
1194 buffer.update(cx, |buffer, cx| {
1195 buffer.update_diagnostics(Default::default(), cx);
1196 self.buffer_snapshots.remove(&buffer.remote_id());
1197 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1198 language_server
1199 .notify::<lsp::notification::DidCloseTextDocument>(
1200 lsp::DidCloseTextDocumentParams {
1201 text_document: lsp::TextDocumentIdentifier::new(
1202 lsp::Url::from_file_path(old_path).unwrap(),
1203 ),
1204 },
1205 )
1206 .log_err();
1207 }
1208 });
1209 }
1210
1211 fn on_buffer_event(
1212 &mut self,
1213 buffer: ModelHandle<Buffer>,
1214 event: &BufferEvent,
1215 cx: &mut ModelContext<Self>,
1216 ) -> Option<()> {
1217 match event {
1218 BufferEvent::Operation(operation) => {
1219 let project_id = self.remote_id()?;
1220 let request = self.client.request(proto::UpdateBuffer {
1221 project_id,
1222 buffer_id: buffer.read(cx).remote_id(),
1223 operations: vec![language::proto::serialize_operation(&operation)],
1224 });
1225 cx.background().spawn(request).detach_and_log_err(cx);
1226 }
1227 BufferEvent::Edited { .. } => {
1228 let (_, language_server) = self
1229 .language_server_for_buffer(buffer.read(cx), cx)?
1230 .clone();
1231 let buffer = buffer.read(cx);
1232 let file = File::from_dyn(buffer.file())?;
1233 let abs_path = file.as_local()?.abs_path(cx);
1234 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1235 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1236 let (version, prev_snapshot) = buffer_snapshots.last()?;
1237 let next_snapshot = buffer.text_snapshot();
1238 let next_version = version + 1;
1239
1240 let content_changes = buffer
1241 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1242 .map(|edit| {
1243 let edit_start = edit.new.start.0;
1244 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1245 let new_text = next_snapshot
1246 .text_for_range(edit.new.start.1..edit.new.end.1)
1247 .collect();
1248 lsp::TextDocumentContentChangeEvent {
1249 range: Some(lsp::Range::new(
1250 point_to_lsp(edit_start),
1251 point_to_lsp(edit_end),
1252 )),
1253 range_length: None,
1254 text: new_text,
1255 }
1256 })
1257 .collect();
1258
1259 buffer_snapshots.push((next_version, next_snapshot));
1260
1261 language_server
1262 .notify::<lsp::notification::DidChangeTextDocument>(
1263 lsp::DidChangeTextDocumentParams {
1264 text_document: lsp::VersionedTextDocumentIdentifier::new(
1265 uri,
1266 next_version,
1267 ),
1268 content_changes,
1269 },
1270 )
1271 .log_err();
1272 }
1273 BufferEvent::Saved => {
1274 let file = File::from_dyn(buffer.read(cx).file())?;
1275 let worktree_id = file.worktree_id(cx);
1276 let abs_path = file.as_local()?.abs_path(cx);
1277 let text_document = lsp::TextDocumentIdentifier {
1278 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1279 };
1280
1281 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1282 server
1283 .notify::<lsp::notification::DidSaveTextDocument>(
1284 lsp::DidSaveTextDocumentParams {
1285 text_document: text_document.clone(),
1286 text: None,
1287 },
1288 )
1289 .log_err();
1290 }
1291 }
1292 _ => {}
1293 }
1294
1295 None
1296 }
1297
1298 fn language_servers_for_worktree(
1299 &self,
1300 worktree_id: WorktreeId,
1301 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1302 self.language_servers.iter().filter_map(
1303 move |((language_server_worktree_id, _), server)| {
1304 if *language_server_worktree_id == worktree_id {
1305 Some(server)
1306 } else {
1307 None
1308 }
1309 },
1310 )
1311 }
1312
1313 fn assign_language_to_buffer(
1314 &mut self,
1315 buffer: &ModelHandle<Buffer>,
1316 cx: &mut ModelContext<Self>,
1317 ) -> Option<()> {
1318 // If the buffer has a language, set it and start the language server if we haven't already.
1319 let full_path = buffer.read(cx).file()?.full_path(cx);
1320 let language = self.languages.select_language(&full_path)?;
1321 buffer.update(cx, |buffer, cx| {
1322 buffer.set_language(Some(language.clone()), cx);
1323 });
1324
1325 let file = File::from_dyn(buffer.read(cx).file())?;
1326 let worktree = file.worktree.read(cx).as_local()?;
1327 let worktree_id = worktree.id();
1328 let worktree_abs_path = worktree.abs_path().clone();
1329 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1330
1331 None
1332 }
1333
1334 fn start_language_server(
1335 &mut self,
1336 worktree_id: WorktreeId,
1337 worktree_path: Arc<Path>,
1338 language: Arc<Language>,
1339 cx: &mut ModelContext<Self>,
1340 ) {
1341 let adapter = if let Some(adapter) = language.lsp_adapter() {
1342 adapter
1343 } else {
1344 return;
1345 };
1346 let key = (worktree_id, adapter.name());
1347 self.started_language_servers
1348 .entry(key.clone())
1349 .or_insert_with(|| {
1350 let server_id = post_inc(&mut self.next_language_server_id);
1351 let language_server = self.languages.start_language_server(
1352 server_id,
1353 language.clone(),
1354 worktree_path,
1355 self.client.http_client(),
1356 cx,
1357 );
1358 cx.spawn_weak(|this, mut cx| async move {
1359 let language_server = language_server?.await.log_err()?;
1360 let language_server = language_server
1361 .initialize(adapter.initialization_options())
1362 .await
1363 .log_err()?;
1364 let this = this.upgrade(&cx)?;
1365 let disk_based_diagnostics_progress_token =
1366 adapter.disk_based_diagnostics_progress_token();
1367
1368 language_server
1369 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1370 let this = this.downgrade();
1371 let adapter = adapter.clone();
1372 move |params, mut cx| {
1373 if let Some(this) = this.upgrade(&cx) {
1374 this.update(&mut cx, |this, cx| {
1375 this.on_lsp_diagnostics_published(
1376 server_id,
1377 params,
1378 &adapter,
1379 disk_based_diagnostics_progress_token,
1380 cx,
1381 );
1382 });
1383 }
1384 }
1385 })
1386 .detach();
1387
1388 language_server
1389 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1390 let settings = this
1391 .read_with(&cx, |this, _| this.language_server_settings.clone());
1392 move |params, _| {
1393 let settings = settings.lock().clone();
1394 async move {
1395 Ok(params
1396 .items
1397 .into_iter()
1398 .map(|item| {
1399 if let Some(section) = &item.section {
1400 settings
1401 .get(section)
1402 .cloned()
1403 .unwrap_or(serde_json::Value::Null)
1404 } else {
1405 settings.clone()
1406 }
1407 })
1408 .collect())
1409 }
1410 }
1411 })
1412 .detach();
1413
1414 language_server
1415 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1416 let this = this.downgrade();
1417 let adapter = adapter.clone();
1418 let language_server = language_server.clone();
1419 move |params, cx| {
1420 Self::on_lsp_workspace_edit(
1421 this,
1422 params,
1423 server_id,
1424 adapter.clone(),
1425 language_server.clone(),
1426 cx,
1427 )
1428 }
1429 })
1430 .detach();
1431
1432 language_server
1433 .on_notification::<lsp::notification::Progress, _>({
1434 let this = this.downgrade();
1435 move |params, mut cx| {
1436 if let Some(this) = this.upgrade(&cx) {
1437 this.update(&mut cx, |this, cx| {
1438 this.on_lsp_progress(
1439 params,
1440 server_id,
1441 disk_based_diagnostics_progress_token,
1442 cx,
1443 );
1444 });
1445 }
1446 }
1447 })
1448 .detach();
1449
1450 this.update(&mut cx, |this, cx| {
1451 this.language_servers
1452 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1453 this.language_server_statuses.insert(
1454 server_id,
1455 LanguageServerStatus {
1456 name: language_server.name().to_string(),
1457 pending_work: Default::default(),
1458 pending_diagnostic_updates: 0,
1459 },
1460 );
1461 language_server
1462 .notify::<lsp::notification::DidChangeConfiguration>(
1463 lsp::DidChangeConfigurationParams {
1464 settings: this.language_server_settings.lock().clone(),
1465 },
1466 )
1467 .ok();
1468
1469 if let Some(project_id) = this.remote_id() {
1470 this.client
1471 .send(proto::StartLanguageServer {
1472 project_id,
1473 server: Some(proto::LanguageServer {
1474 id: server_id as u64,
1475 name: language_server.name().to_string(),
1476 }),
1477 })
1478 .log_err();
1479 }
1480
1481 // Tell the language server about every open buffer in the worktree that matches the language.
1482 for buffer in this.opened_buffers.values() {
1483 if let Some(buffer_handle) = buffer.upgrade(cx) {
1484 let buffer = buffer_handle.read(cx);
1485 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1486 file
1487 } else {
1488 continue;
1489 };
1490 let language = if let Some(language) = buffer.language() {
1491 language
1492 } else {
1493 continue;
1494 };
1495 if file.worktree.read(cx).id() != key.0
1496 || language.lsp_adapter().map(|a| a.name())
1497 != Some(key.1.clone())
1498 {
1499 continue;
1500 }
1501
1502 let file = file.as_local()?;
1503 let versions = this
1504 .buffer_snapshots
1505 .entry(buffer.remote_id())
1506 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1507 let (version, initial_snapshot) = versions.last().unwrap();
1508 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1509 let language_id = adapter.id_for_language(language.name().as_ref());
1510 language_server
1511 .notify::<lsp::notification::DidOpenTextDocument>(
1512 lsp::DidOpenTextDocumentParams {
1513 text_document: lsp::TextDocumentItem::new(
1514 uri,
1515 language_id.unwrap_or_default(),
1516 *version,
1517 initial_snapshot.text(),
1518 ),
1519 },
1520 )
1521 .log_err()?;
1522 buffer_handle.update(cx, |buffer, cx| {
1523 buffer.set_completion_triggers(
1524 language_server
1525 .capabilities()
1526 .completion_provider
1527 .as_ref()
1528 .and_then(|provider| {
1529 provider.trigger_characters.clone()
1530 })
1531 .unwrap_or(Vec::new()),
1532 cx,
1533 )
1534 });
1535 }
1536 }
1537
1538 cx.notify();
1539 Some(())
1540 });
1541
1542 Some(language_server)
1543 })
1544 });
1545 }
1546
1547 pub fn restart_language_servers_for_buffers(
1548 &mut self,
1549 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1550 cx: &mut ModelContext<Self>,
1551 ) -> Option<()> {
1552 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1553 .into_iter()
1554 .filter_map(|buffer| {
1555 let file = File::from_dyn(buffer.read(cx).file())?;
1556 let worktree = file.worktree.read(cx).as_local()?;
1557 let worktree_id = worktree.id();
1558 let worktree_abs_path = worktree.abs_path().clone();
1559 let full_path = file.full_path(cx);
1560 Some((worktree_id, worktree_abs_path, full_path))
1561 })
1562 .collect();
1563 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1564 let language = self.languages.select_language(&full_path)?;
1565 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1566 }
1567
1568 None
1569 }
1570
1571 fn restart_language_server(
1572 &mut self,
1573 worktree_id: WorktreeId,
1574 worktree_path: Arc<Path>,
1575 language: Arc<Language>,
1576 cx: &mut ModelContext<Self>,
1577 ) {
1578 let adapter = if let Some(adapter) = language.lsp_adapter() {
1579 adapter
1580 } else {
1581 return;
1582 };
1583 let key = (worktree_id, adapter.name());
1584 let server_to_shutdown = self.language_servers.remove(&key);
1585 self.started_language_servers.remove(&key);
1586 server_to_shutdown
1587 .as_ref()
1588 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1589 cx.spawn_weak(|this, mut cx| async move {
1590 if let Some(this) = this.upgrade(&cx) {
1591 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1592 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1593 shutdown_task.await;
1594 }
1595 }
1596
1597 this.update(&mut cx, |this, cx| {
1598 this.start_language_server(worktree_id, worktree_path, language, cx);
1599 });
1600 }
1601 })
1602 .detach();
1603 }
1604
1605 fn on_lsp_diagnostics_published(
1606 &mut self,
1607 server_id: usize,
1608 mut params: lsp::PublishDiagnosticsParams,
1609 adapter: &Arc<dyn LspAdapter>,
1610 disk_based_diagnostics_progress_token: Option<&str>,
1611 cx: &mut ModelContext<Self>,
1612 ) {
1613 adapter.process_diagnostics(&mut params);
1614 if disk_based_diagnostics_progress_token.is_none() {
1615 self.disk_based_diagnostics_started(cx);
1616 self.broadcast_language_server_update(
1617 server_id,
1618 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1619 proto::LspDiskBasedDiagnosticsUpdating {},
1620 ),
1621 );
1622 }
1623 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1624 .log_err();
1625 if disk_based_diagnostics_progress_token.is_none() {
1626 self.disk_based_diagnostics_finished(cx);
1627 self.broadcast_language_server_update(
1628 server_id,
1629 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1630 proto::LspDiskBasedDiagnosticsUpdated {},
1631 ),
1632 );
1633 }
1634 }
1635
1636 fn on_lsp_progress(
1637 &mut self,
1638 progress: lsp::ProgressParams,
1639 server_id: usize,
1640 disk_based_diagnostics_progress_token: Option<&str>,
1641 cx: &mut ModelContext<Self>,
1642 ) {
1643 let token = match progress.token {
1644 lsp::NumberOrString::String(token) => token,
1645 lsp::NumberOrString::Number(token) => {
1646 log::info!("skipping numeric progress token {}", token);
1647 return;
1648 }
1649 };
1650 let progress = match progress.value {
1651 lsp::ProgressParamsValue::WorkDone(value) => value,
1652 };
1653 let language_server_status =
1654 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1655 status
1656 } else {
1657 return;
1658 };
1659 match progress {
1660 lsp::WorkDoneProgress::Begin(_) => {
1661 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1662 language_server_status.pending_diagnostic_updates += 1;
1663 if language_server_status.pending_diagnostic_updates == 1 {
1664 self.disk_based_diagnostics_started(cx);
1665 self.broadcast_language_server_update(
1666 server_id,
1667 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1668 proto::LspDiskBasedDiagnosticsUpdating {},
1669 ),
1670 );
1671 }
1672 } else {
1673 self.on_lsp_work_start(server_id, token.clone(), cx);
1674 self.broadcast_language_server_update(
1675 server_id,
1676 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1677 token,
1678 }),
1679 );
1680 }
1681 }
1682 lsp::WorkDoneProgress::Report(report) => {
1683 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1684 self.on_lsp_work_progress(
1685 server_id,
1686 token.clone(),
1687 LanguageServerProgress {
1688 message: report.message.clone(),
1689 percentage: report.percentage.map(|p| p as usize),
1690 last_update_at: Instant::now(),
1691 },
1692 cx,
1693 );
1694 self.broadcast_language_server_update(
1695 server_id,
1696 proto::update_language_server::Variant::WorkProgress(
1697 proto::LspWorkProgress {
1698 token,
1699 message: report.message,
1700 percentage: report.percentage.map(|p| p as u32),
1701 },
1702 ),
1703 );
1704 }
1705 }
1706 lsp::WorkDoneProgress::End(_) => {
1707 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1708 language_server_status.pending_diagnostic_updates -= 1;
1709 if language_server_status.pending_diagnostic_updates == 0 {
1710 self.disk_based_diagnostics_finished(cx);
1711 self.broadcast_language_server_update(
1712 server_id,
1713 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1714 proto::LspDiskBasedDiagnosticsUpdated {},
1715 ),
1716 );
1717 }
1718 } else {
1719 self.on_lsp_work_end(server_id, token.clone(), cx);
1720 self.broadcast_language_server_update(
1721 server_id,
1722 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1723 token,
1724 }),
1725 );
1726 }
1727 }
1728 }
1729 }
1730
1731 fn on_lsp_work_start(
1732 &mut self,
1733 language_server_id: usize,
1734 token: String,
1735 cx: &mut ModelContext<Self>,
1736 ) {
1737 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1738 status.pending_work.insert(
1739 token,
1740 LanguageServerProgress {
1741 message: None,
1742 percentage: None,
1743 last_update_at: Instant::now(),
1744 },
1745 );
1746 cx.notify();
1747 }
1748 }
1749
1750 fn on_lsp_work_progress(
1751 &mut self,
1752 language_server_id: usize,
1753 token: String,
1754 progress: LanguageServerProgress,
1755 cx: &mut ModelContext<Self>,
1756 ) {
1757 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1758 status.pending_work.insert(token, progress);
1759 cx.notify();
1760 }
1761 }
1762
1763 fn on_lsp_work_end(
1764 &mut self,
1765 language_server_id: usize,
1766 token: String,
1767 cx: &mut ModelContext<Self>,
1768 ) {
1769 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1770 status.pending_work.remove(&token);
1771 cx.notify();
1772 }
1773 }
1774
1775 async fn on_lsp_workspace_edit(
1776 this: WeakModelHandle<Self>,
1777 params: lsp::ApplyWorkspaceEditParams,
1778 server_id: usize,
1779 adapter: Arc<dyn LspAdapter>,
1780 language_server: Arc<LanguageServer>,
1781 mut cx: AsyncAppContext,
1782 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1783 let this = this
1784 .upgrade(&cx)
1785 .ok_or_else(|| anyhow!("project project closed"))?;
1786 let transaction = Self::deserialize_workspace_edit(
1787 this.clone(),
1788 params.edit,
1789 true,
1790 adapter.clone(),
1791 language_server.clone(),
1792 &mut cx,
1793 )
1794 .await
1795 .log_err();
1796 this.update(&mut cx, |this, _| {
1797 if let Some(transaction) = transaction {
1798 this.last_workspace_edits_by_language_server
1799 .insert(server_id, transaction);
1800 }
1801 });
1802 Ok(lsp::ApplyWorkspaceEditResponse {
1803 applied: true,
1804 failed_change: None,
1805 failure_reason: None,
1806 })
1807 }
1808
1809 fn broadcast_language_server_update(
1810 &self,
1811 language_server_id: usize,
1812 event: proto::update_language_server::Variant,
1813 ) {
1814 if let Some(project_id) = self.remote_id() {
1815 self.client
1816 .send(proto::UpdateLanguageServer {
1817 project_id,
1818 language_server_id: language_server_id as u64,
1819 variant: Some(event),
1820 })
1821 .log_err();
1822 }
1823 }
1824
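    /// Replaces the shared language server settings and pushes the new configuration to
    /// every running language server via a `workspace/didChangeConfiguration` notification.
    ///
    /// Sketch; the settings shape depends entirely on the language servers in use:
    ///
    /// ```ignore
    /// project.update(cx, |project, _| {
    ///     project.set_language_server_settings(serde_json::json!({
    ///         "rust-analyzer": { "checkOnSave": { "enable": true } }
    ///     }));
    /// });
    /// ```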
1825 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1826 for (_, server) in self.language_servers.values() {
1827 server
1828 .notify::<lsp::notification::DidChangeConfiguration>(
1829 lsp::DidChangeConfigurationParams {
1830 settings: settings.clone(),
1831 },
1832 )
1833 .ok();
1834 }
1835 *self.language_server_settings.lock() = settings;
1836 }
1837
1838 pub fn language_server_statuses(
1839 &self,
1840 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1841 self.language_server_statuses.values()
1842 }
1843
1844 pub fn update_diagnostics(
1845 &mut self,
1846 params: lsp::PublishDiagnosticsParams,
1847 disk_based_sources: &[&str],
1848 cx: &mut ModelContext<Self>,
1849 ) -> Result<()> {
1850 let abs_path = params
1851 .uri
1852 .to_file_path()
1853 .map_err(|_| anyhow!("URI is not a file"))?;
1854 let mut next_group_id = 0;
1855 let mut diagnostics = Vec::default();
1856 let mut primary_diagnostic_group_ids = HashMap::default();
1857 let mut sources_by_group_id = HashMap::default();
1858 let mut supporting_diagnostics = HashMap::default();
1859 for diagnostic in &params.diagnostics {
1860 let source = diagnostic.source.as_ref();
1861 let code = diagnostic.code.as_ref().map(|code| match code {
1862 lsp::NumberOrString::Number(code) => code.to_string(),
1863 lsp::NumberOrString::String(code) => code.clone(),
1864 });
1865 let range = range_from_lsp(diagnostic.range);
1866 let is_supporting = diagnostic
1867 .related_information
1868 .as_ref()
1869 .map_or(false, |infos| {
1870 infos.iter().any(|info| {
1871 primary_diagnostic_group_ids.contains_key(&(
1872 source,
1873 code.clone(),
1874 range_from_lsp(info.location.range),
1875 ))
1876 })
1877 });
1878
1879 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1880 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1881 });
1882
1883 if is_supporting {
1884 supporting_diagnostics.insert(
1885 (source, code.clone(), range),
1886 (diagnostic.severity, is_unnecessary),
1887 );
1888 } else {
1889 let group_id = post_inc(&mut next_group_id);
1890 let is_disk_based = source.map_or(false, |source| {
1891 disk_based_sources.contains(&source.as_str())
1892 });
1893
1894 sources_by_group_id.insert(group_id, source);
1895 primary_diagnostic_group_ids
1896 .insert((source, code.clone(), range.clone()), group_id);
1897
1898 diagnostics.push(DiagnosticEntry {
1899 range,
1900 diagnostic: Diagnostic {
1901 code: code.clone(),
1902 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1903 message: diagnostic.message.clone(),
1904 group_id,
1905 is_primary: true,
1906 is_valid: true,
1907 is_disk_based,
1908 is_unnecessary,
1909 },
1910 });
1911 if let Some(infos) = &diagnostic.related_information {
1912 for info in infos {
1913 if info.location.uri == params.uri && !info.message.is_empty() {
1914 let range = range_from_lsp(info.location.range);
1915 diagnostics.push(DiagnosticEntry {
1916 range,
1917 diagnostic: Diagnostic {
1918 code: code.clone(),
1919 severity: DiagnosticSeverity::INFORMATION,
1920 message: info.message.clone(),
1921 group_id,
1922 is_primary: false,
1923 is_valid: true,
1924 is_disk_based,
1925 is_unnecessary: false,
1926 },
1927 });
1928 }
1929 }
1930 }
1931 }
1932 }
1933
1934 for entry in &mut diagnostics {
1935 let diagnostic = &mut entry.diagnostic;
1936 if !diagnostic.is_primary {
1937 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1938 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1939 source,
1940 diagnostic.code.clone(),
1941 entry.range.clone(),
1942 )) {
1943 if let Some(severity) = severity {
1944 diagnostic.severity = severity;
1945 }
1946 diagnostic.is_unnecessary = is_unnecessary;
1947 }
1948 }
1949 }
1950
1951 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1952 Ok(())
1953 }
1954
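    /// Records diagnostics for `abs_path` on its local worktree and on any open
    /// buffer for that path, emitting `Event::DiagnosticsUpdated` when the
    /// worktree's diagnostics change. Invisible worktrees are ignored.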
1955 pub fn update_diagnostic_entries(
1956 &mut self,
1957 abs_path: PathBuf,
1958 version: Option<i32>,
1959 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1960 cx: &mut ModelContext<Project>,
1961 ) -> Result<(), anyhow::Error> {
1962 let (worktree, relative_path) = self
1963 .find_local_worktree(&abs_path, cx)
1964 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1965 if !worktree.read(cx).is_visible() {
1966 return Ok(());
1967 }
1968
1969 let project_path = ProjectPath {
1970 worktree_id: worktree.read(cx).id(),
1971 path: relative_path.into(),
1972 };
1973 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
1974 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1975 }
1976
1977 let updated = worktree.update(cx, |worktree, cx| {
1978 worktree
1979 .as_local_mut()
1980 .ok_or_else(|| anyhow!("not a local worktree"))?
1981 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1982 })?;
1983 if updated {
1984 cx.emit(Event::DiagnosticsUpdated(project_path));
1985 }
1986 Ok(())
1987 }
1988
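    /// Installs diagnostics on a buffer: ranges from disk-based diagnostics are
    /// mapped through any unsaved edits, all ranges are clipped to the snapshot
    /// matching the reported LSP version, empty ranges are widened by one
    /// character, and the result is stored as a new `DiagnosticSet`.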
1989 fn update_buffer_diagnostics(
1990 &mut self,
1991 buffer: &ModelHandle<Buffer>,
1992 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1993 version: Option<i32>,
1994 cx: &mut ModelContext<Self>,
1995 ) -> Result<()> {
1996 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1997 Ordering::Equal
1998 .then_with(|| b.is_primary.cmp(&a.is_primary))
1999 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2000 .then_with(|| a.severity.cmp(&b.severity))
2001 .then_with(|| a.message.cmp(&b.message))
2002 }
2003
2004 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2005
2006 diagnostics.sort_unstable_by(|a, b| {
2007 Ordering::Equal
2008 .then_with(|| a.range.start.cmp(&b.range.start))
2009 .then_with(|| b.range.end.cmp(&a.range.end))
2010 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2011 });
2012
2013 let mut sanitized_diagnostics = Vec::new();
2014 let edits_since_save = Patch::new(
2015 snapshot
2016 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2017 .collect(),
2018 );
2019 for entry in diagnostics {
2020 let start;
2021 let end;
2022 if entry.diagnostic.is_disk_based {
2023 // Some diagnostics are based on files on disk instead of buffers'
2024 // current contents. Adjust these diagnostics' ranges to reflect
2025 // any unsaved edits.
2026 start = edits_since_save.old_to_new(entry.range.start);
2027 end = edits_since_save.old_to_new(entry.range.end);
2028 } else {
2029 start = entry.range.start;
2030 end = entry.range.end;
2031 }
2032
2033 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2034 ..snapshot.clip_point_utf16(end, Bias::Right);
2035
2036 // Expand empty ranges by one character
2037 if range.start == range.end {
2038 range.end.column += 1;
2039 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2040 if range.start == range.end && range.end.column > 0 {
2041 range.start.column -= 1;
2042 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2043 }
2044 }
2045
2046 sanitized_diagnostics.push(DiagnosticEntry {
2047 range,
2048 diagnostic: entry.diagnostic,
2049 });
2050 }
2051 drop(edits_since_save);
2052
2053 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2054 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2055 Ok(())
2056 }
2057
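    /// Reloads the given buffers from disk, skipping buffers that aren't dirty.
    /// Local buffers are reloaded directly; remote buffers are reloaded by the
    /// host via a `ReloadBuffers` request. Unless `push_to_history` is true, the
    /// resulting transactions are dropped from each buffer's undo history.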
2058 pub fn reload_buffers(
2059 &self,
2060 buffers: HashSet<ModelHandle<Buffer>>,
2061 push_to_history: bool,
2062 cx: &mut ModelContext<Self>,
2063 ) -> Task<Result<ProjectTransaction>> {
2064 let mut local_buffers = Vec::new();
2065 let mut remote_buffers = None;
2066 for buffer_handle in buffers {
2067 let buffer = buffer_handle.read(cx);
2068 if buffer.is_dirty() {
2069 if let Some(file) = File::from_dyn(buffer.file()) {
2070 if file.is_local() {
2071 local_buffers.push(buffer_handle);
2072 } else {
2073 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2074 }
2075 }
2076 }
2077 }
2078
2079 let remote_buffers = self.remote_id().zip(remote_buffers);
2080 let client = self.client.clone();
2081
2082 cx.spawn(|this, mut cx| async move {
2083 let mut project_transaction = ProjectTransaction::default();
2084
2085 if let Some((project_id, remote_buffers)) = remote_buffers {
2086 let response = client
2087 .request(proto::ReloadBuffers {
2088 project_id,
2089 buffer_ids: remote_buffers
2090 .iter()
2091 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2092 .collect(),
2093 })
2094 .await?
2095 .transaction
2096 .ok_or_else(|| anyhow!("missing transaction"))?;
2097 project_transaction = this
2098 .update(&mut cx, |this, cx| {
2099 this.deserialize_project_transaction(response, push_to_history, cx)
2100 })
2101 .await?;
2102 }
2103
2104 for buffer in local_buffers {
2105 let transaction = buffer
2106 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2107 .await?;
2108 buffer.update(&mut cx, |buffer, cx| {
2109 if let Some(transaction) = transaction {
2110 if !push_to_history {
2111 buffer.forget_transaction(transaction.id);
2112 }
2113 project_transaction.0.insert(cx.handle(), transaction);
2114 }
2115 });
2116 }
2117
2118 Ok(project_transaction)
2119 })
2120 }
2121
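    /// Formats the given buffers. Local buffers are formatted through their
    /// language server, preferring `textDocument/formatting` and falling back to
    /// `textDocument/rangeFormatting` over the whole buffer; remote buffers are
    /// formatted by the host via a `FormatBuffers` request.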
2122 pub fn format(
2123 &self,
2124 buffers: HashSet<ModelHandle<Buffer>>,
2125 push_to_history: bool,
2126 cx: &mut ModelContext<Project>,
2127 ) -> Task<Result<ProjectTransaction>> {
2128 let mut local_buffers = Vec::new();
2129 let mut remote_buffers = None;
2130 for buffer_handle in buffers {
2131 let buffer = buffer_handle.read(cx);
2132 if let Some(file) = File::from_dyn(buffer.file()) {
2133 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2134 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2135 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2136 }
2137 } else {
2138 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2139 }
2140 } else {
2141 return Task::ready(Ok(Default::default()));
2142 }
2143 }
2144
2145 let remote_buffers = self.remote_id().zip(remote_buffers);
2146 let client = self.client.clone();
2147
2148 cx.spawn(|this, mut cx| async move {
2149 let mut project_transaction = ProjectTransaction::default();
2150
2151 if let Some((project_id, remote_buffers)) = remote_buffers {
2152 let response = client
2153 .request(proto::FormatBuffers {
2154 project_id,
2155 buffer_ids: remote_buffers
2156 .iter()
2157 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2158 .collect(),
2159 })
2160 .await?
2161 .transaction
2162 .ok_or_else(|| anyhow!("missing transaction"))?;
2163 project_transaction = this
2164 .update(&mut cx, |this, cx| {
2165 this.deserialize_project_transaction(response, push_to_history, cx)
2166 })
2167 .await?;
2168 }
2169
2170 for (buffer, buffer_abs_path, language_server) in local_buffers {
2171 let text_document = lsp::TextDocumentIdentifier::new(
2172 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2173 );
2174 let capabilities = &language_server.capabilities();
2175 let tab_size = cx.update(|cx| {
2176 let language_name = buffer.read(cx).language().map(|language| language.name());
2177 cx.global::<Settings>().tab_size(language_name.as_deref())
2178 });
2179 let lsp_edits = if capabilities
2180 .document_formatting_provider
2181 .as_ref()
2182 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2183 {
2184 language_server
2185 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2186 text_document,
2187 options: lsp::FormattingOptions {
2188 tab_size,
2189 insert_spaces: true,
2190 insert_final_newline: Some(true),
2191 ..Default::default()
2192 },
2193 work_done_progress_params: Default::default(),
2194 })
2195 .await?
2196 } else if capabilities
2197 .document_range_formatting_provider
2198 .as_ref()
2199 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2200 {
2201 let buffer_start = lsp::Position::new(0, 0);
2202 let buffer_end =
2203 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2204 language_server
2205 .request::<lsp::request::RangeFormatting>(
2206 lsp::DocumentRangeFormattingParams {
2207 text_document,
2208 range: lsp::Range::new(buffer_start, buffer_end),
2209 options: lsp::FormattingOptions {
2210                                    tab_size,
2211 insert_spaces: true,
2212 insert_final_newline: Some(true),
2213 ..Default::default()
2214 },
2215 work_done_progress_params: Default::default(),
2216 },
2217 )
2218 .await?
2219 } else {
2220 continue;
2221 };
2222
2223 if let Some(lsp_edits) = lsp_edits {
2224 let edits = this
2225 .update(&mut cx, |this, cx| {
2226 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2227 })
2228 .await?;
2229 buffer.update(&mut cx, |buffer, cx| {
2230 buffer.finalize_last_transaction();
2231 buffer.start_transaction();
2232 for (range, text) in edits {
2233 buffer.edit([range], text, cx);
2234 }
2235 if buffer.end_transaction(cx).is_some() {
2236 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2237 if !push_to_history {
2238 buffer.forget_transaction(transaction.id);
2239 }
2240 project_transaction.0.insert(cx.handle(), transaction);
2241 }
2242 });
2243 }
2244 }
2245
2246 Ok(project_transaction)
2247 })
2248 }
2249
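    /// Returns the definition locations for the symbol at `position`, using the
    /// buffer's language server or, for remote projects, the host.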
2250 pub fn definition<T: ToPointUtf16>(
2251 &self,
2252 buffer: &ModelHandle<Buffer>,
2253 position: T,
2254 cx: &mut ModelContext<Self>,
2255 ) -> Task<Result<Vec<Location>>> {
2256 let position = position.to_point_utf16(buffer.read(cx));
2257 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2258 }
2259
2260 pub fn references<T: ToPointUtf16>(
2261 &self,
2262 buffer: &ModelHandle<Buffer>,
2263 position: T,
2264 cx: &mut ModelContext<Self>,
2265 ) -> Task<Result<Vec<Location>>> {
2266 let position = position.to_point_utf16(buffer.read(cx));
2267 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2268 }
2269
2270 pub fn document_highlights<T: ToPointUtf16>(
2271 &self,
2272 buffer: &ModelHandle<Buffer>,
2273 position: T,
2274 cx: &mut ModelContext<Self>,
2275 ) -> Task<Result<Vec<DocumentHighlight>>> {
2276 let position = position.to_point_utf16(buffer.read(cx));
2277
2278 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2279 }
2280
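    /// Queries every running language server for workspace symbols matching
    /// `query` (or asks the host when the project is remote), resolving each
    /// result to a worktree-relative path and a display label.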
2281 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2282 if self.is_local() {
2283 let mut requests = Vec::new();
2284 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2285 let worktree_id = *worktree_id;
2286 if let Some(worktree) = self
2287 .worktree_for_id(worktree_id, cx)
2288 .and_then(|worktree| worktree.read(cx).as_local())
2289 {
2290 let lsp_adapter = lsp_adapter.clone();
2291 let worktree_abs_path = worktree.abs_path().clone();
2292 requests.push(
2293 language_server
2294 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2295 query: query.to_string(),
2296 ..Default::default()
2297 })
2298 .log_err()
2299 .map(move |response| {
2300 (
2301 lsp_adapter,
2302 worktree_id,
2303 worktree_abs_path,
2304 response.unwrap_or_default(),
2305 )
2306 }),
2307 );
2308 }
2309 }
2310
2311 cx.spawn_weak(|this, cx| async move {
2312 let responses = futures::future::join_all(requests).await;
2313 let this = if let Some(this) = this.upgrade(&cx) {
2314 this
2315 } else {
2316 return Ok(Default::default());
2317 };
2318 this.read_with(&cx, |this, cx| {
2319 let mut symbols = Vec::new();
2320 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2321 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2322 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2323 let mut worktree_id = source_worktree_id;
2324 let path;
2325 if let Some((worktree, rel_path)) =
2326 this.find_local_worktree(&abs_path, cx)
2327 {
2328 worktree_id = worktree.read(cx).id();
2329 path = rel_path;
2330 } else {
2331 path = relativize_path(&worktree_abs_path, &abs_path);
2332 }
2333
2334 let label = this
2335 .languages
2336 .select_language(&path)
2337 .and_then(|language| {
2338 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2339 })
2340 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2341 let signature = this.symbol_signature(worktree_id, &path);
2342
2343 Some(Symbol {
2344 source_worktree_id,
2345 worktree_id,
2346 language_server_name: adapter.name(),
2347 name: lsp_symbol.name,
2348 kind: lsp_symbol.kind,
2349 label,
2350 path,
2351 range: range_from_lsp(lsp_symbol.location.range),
2352 signature,
2353 })
2354 }));
2355 }
2356 Ok(symbols)
2357 })
2358 })
2359 } else if let Some(project_id) = self.remote_id() {
2360 let request = self.client.request(proto::GetProjectSymbols {
2361 project_id,
2362 query: query.to_string(),
2363 });
2364 cx.spawn_weak(|this, cx| async move {
2365 let response = request.await?;
2366 let mut symbols = Vec::new();
2367 if let Some(this) = this.upgrade(&cx) {
2368 this.read_with(&cx, |this, _| {
2369 symbols.extend(
2370 response
2371 .symbols
2372 .into_iter()
2373 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2374 );
2375 })
2376 }
2377 Ok(symbols)
2378 })
2379 } else {
2380 Task::ready(Ok(Default::default()))
2381 }
2382 }
2383
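    /// Opens the buffer containing the given symbol, either through the language
    /// server that reported it or via an `OpenBufferForSymbol` request to the host.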
2384 pub fn open_buffer_for_symbol(
2385 &mut self,
2386 symbol: &Symbol,
2387 cx: &mut ModelContext<Self>,
2388 ) -> Task<Result<ModelHandle<Buffer>>> {
2389 if self.is_local() {
2390 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2391 symbol.source_worktree_id,
2392 symbol.language_server_name.clone(),
2393 )) {
2394 server.clone()
2395 } else {
2396 return Task::ready(Err(anyhow!(
2397 "language server for worktree and language not found"
2398 )));
2399 };
2400
2401 let worktree_abs_path = if let Some(worktree_abs_path) = self
2402 .worktree_for_id(symbol.worktree_id, cx)
2403 .and_then(|worktree| worktree.read(cx).as_local())
2404 .map(|local_worktree| local_worktree.abs_path())
2405 {
2406 worktree_abs_path
2407 } else {
2408 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2409 };
2410 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2411 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2412 uri
2413 } else {
2414 return Task::ready(Err(anyhow!("invalid symbol path")));
2415 };
2416
2417 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2418 } else if let Some(project_id) = self.remote_id() {
2419 let request = self.client.request(proto::OpenBufferForSymbol {
2420 project_id,
2421 symbol: Some(serialize_symbol(symbol)),
2422 });
2423 cx.spawn(|this, mut cx| async move {
2424 let response = request.await?;
2425 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2426 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2427 .await
2428 })
2429 } else {
2430 Task::ready(Err(anyhow!("project does not have a remote id")))
2431 }
2432 }
2433
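    /// Requests completions at `position`. Locally this issues a
    /// `textDocument/completion` request and converts each item into an anchored
    /// `Completion`; for remote projects it proxies through `GetCompletions`.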
2434 pub fn completions<T: ToPointUtf16>(
2435 &self,
2436 source_buffer_handle: &ModelHandle<Buffer>,
2437 position: T,
2438 cx: &mut ModelContext<Self>,
2439 ) -> Task<Result<Vec<Completion>>> {
2440 let source_buffer_handle = source_buffer_handle.clone();
2441 let source_buffer = source_buffer_handle.read(cx);
2442 let buffer_id = source_buffer.remote_id();
2443 let language = source_buffer.language().cloned();
2444 let worktree;
2445 let buffer_abs_path;
2446 if let Some(file) = File::from_dyn(source_buffer.file()) {
2447 worktree = file.worktree.clone();
2448 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2449 } else {
2450 return Task::ready(Ok(Default::default()));
2451 };
2452
2453 let position = position.to_point_utf16(source_buffer);
2454 let anchor = source_buffer.anchor_after(position);
2455
2456 if worktree.read(cx).as_local().is_some() {
2457 let buffer_abs_path = buffer_abs_path.unwrap();
2458 let (_, lang_server) =
2459 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2460 server.clone()
2461 } else {
2462 return Task::ready(Ok(Default::default()));
2463 };
2464
2465 cx.spawn(|_, cx| async move {
2466 let completions = lang_server
2467 .request::<lsp::request::Completion>(lsp::CompletionParams {
2468 text_document_position: lsp::TextDocumentPositionParams::new(
2469 lsp::TextDocumentIdentifier::new(
2470 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2471 ),
2472 point_to_lsp(position),
2473 ),
2474 context: Default::default(),
2475 work_done_progress_params: Default::default(),
2476 partial_result_params: Default::default(),
2477 })
2478 .await
2479 .context("lsp completion request failed")?;
2480
2481 let completions = if let Some(completions) = completions {
2482 match completions {
2483 lsp::CompletionResponse::Array(completions) => completions,
2484 lsp::CompletionResponse::List(list) => list.items,
2485 }
2486 } else {
2487 Default::default()
2488 };
2489
2490 source_buffer_handle.read_with(&cx, |this, _| {
2491 Ok(completions
2492 .into_iter()
2493 .filter_map(|lsp_completion| {
2494 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2495 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2496 (range_from_lsp(edit.range), edit.new_text.clone())
2497 }
2498 None => {
2499 let clipped_position =
2500 this.clip_point_utf16(position, Bias::Left);
2501 if position != clipped_position {
2502 log::info!("completion out of expected range");
2503 return None;
2504 }
2505 let text = lsp_completion
2506 .insert_text
2507 .as_ref()
2508 .unwrap_or(&lsp_completion.label)
2509 .clone();
2510 (this.common_prefix_at(clipped_position, &text), text.clone())
2511 }
2512 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2513 log::info!("unsupported insert/replace completion");
2514 return None;
2515 }
2516 };
2517
2518 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2519 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2520 if clipped_start == old_range.start && clipped_end == old_range.end {
2521 Some(Completion {
2522 old_range: this.anchor_before(old_range.start)
2523 ..this.anchor_after(old_range.end),
2524 new_text,
2525 label: language
2526 .as_ref()
2527 .and_then(|l| l.label_for_completion(&lsp_completion))
2528 .unwrap_or_else(|| {
2529 CodeLabel::plain(
2530 lsp_completion.label.clone(),
2531 lsp_completion.filter_text.as_deref(),
2532 )
2533 }),
2534 lsp_completion,
2535 })
2536 } else {
2537 log::info!("completion out of expected range");
2538 None
2539 }
2540 })
2541 .collect())
2542 })
2543 })
2544 } else if let Some(project_id) = self.remote_id() {
2545 let rpc = self.client.clone();
2546 let message = proto::GetCompletions {
2547 project_id,
2548 buffer_id,
2549 position: Some(language::proto::serialize_anchor(&anchor)),
2550 version: serialize_version(&source_buffer.version()),
2551 };
2552 cx.spawn_weak(|_, mut cx| async move {
2553 let response = rpc.request(message).await?;
2554
2555 source_buffer_handle
2556 .update(&mut cx, |buffer, _| {
2557 buffer.wait_for_version(deserialize_version(response.version))
2558 })
2559 .await;
2560
2561 response
2562 .completions
2563 .into_iter()
2564 .map(|completion| {
2565 language::proto::deserialize_completion(completion, language.as_ref())
2566 })
2567 .collect()
2568 })
2569 } else {
2570 Task::ready(Ok(Default::default()))
2571 }
2572 }
2573
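    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the resulting transaction. The transaction is removed
    /// from the buffer's undo history unless `push_to_history` is true.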
2574 pub fn apply_additional_edits_for_completion(
2575 &self,
2576 buffer_handle: ModelHandle<Buffer>,
2577 completion: Completion,
2578 push_to_history: bool,
2579 cx: &mut ModelContext<Self>,
2580 ) -> Task<Result<Option<Transaction>>> {
2581 let buffer = buffer_handle.read(cx);
2582 let buffer_id = buffer.remote_id();
2583
2584 if self.is_local() {
2585 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2586 {
2587 server.clone()
2588 } else {
2589 return Task::ready(Ok(Default::default()));
2590 };
2591
2592 cx.spawn(|this, mut cx| async move {
2593 let resolved_completion = lang_server
2594 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2595 .await?;
2596 if let Some(edits) = resolved_completion.additional_text_edits {
2597 let edits = this
2598 .update(&mut cx, |this, cx| {
2599 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2600 })
2601 .await?;
2602 buffer_handle.update(&mut cx, |buffer, cx| {
2603 buffer.finalize_last_transaction();
2604 buffer.start_transaction();
2605 for (range, text) in edits {
2606 buffer.edit([range], text, cx);
2607 }
2608 let transaction = if buffer.end_transaction(cx).is_some() {
2609 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2610 if !push_to_history {
2611 buffer.forget_transaction(transaction.id);
2612 }
2613 Some(transaction)
2614 } else {
2615 None
2616 };
2617 Ok(transaction)
2618 })
2619 } else {
2620 Ok(None)
2621 }
2622 })
2623 } else if let Some(project_id) = self.remote_id() {
2624 let client = self.client.clone();
2625 cx.spawn(|_, mut cx| async move {
2626 let response = client
2627 .request(proto::ApplyCompletionAdditionalEdits {
2628 project_id,
2629 buffer_id,
2630 completion: Some(language::proto::serialize_completion(&completion)),
2631 })
2632 .await?;
2633
2634 if let Some(transaction) = response.transaction {
2635 let transaction = language::proto::deserialize_transaction(transaction)?;
2636 buffer_handle
2637 .update(&mut cx, |buffer, _| {
2638 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2639 })
2640 .await;
2641 if push_to_history {
2642 buffer_handle.update(&mut cx, |buffer, _| {
2643 buffer.push_transaction(transaction.clone(), Instant::now());
2644 });
2645 }
2646 Ok(Some(transaction))
2647 } else {
2648 Ok(None)
2649 }
2650 })
2651 } else {
2652 Task::ready(Err(anyhow!("project does not have a remote id")))
2653 }
2654 }
2655
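    /// Requests code actions for `range`, passing the diagnostics that overlap the
    /// range as context. Local requests go to the buffer's language server; remote
    /// requests are proxied through `GetCodeActions`.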
2656 pub fn code_actions<T: Clone + ToOffset>(
2657 &self,
2658 buffer_handle: &ModelHandle<Buffer>,
2659 range: Range<T>,
2660 cx: &mut ModelContext<Self>,
2661 ) -> Task<Result<Vec<CodeAction>>> {
2662 let buffer_handle = buffer_handle.clone();
2663 let buffer = buffer_handle.read(cx);
2664 let snapshot = buffer.snapshot();
2665 let relevant_diagnostics = snapshot
2666 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2667 .map(|entry| entry.to_lsp_diagnostic_stub())
2668 .collect();
2669 let buffer_id = buffer.remote_id();
2670 let worktree;
2671 let buffer_abs_path;
2672 if let Some(file) = File::from_dyn(buffer.file()) {
2673 worktree = file.worktree.clone();
2674 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2675 } else {
2676 return Task::ready(Ok(Default::default()));
2677 };
2678 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2679
2680 if worktree.read(cx).as_local().is_some() {
2681 let buffer_abs_path = buffer_abs_path.unwrap();
2682 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2683 {
2684 server.clone()
2685 } else {
2686 return Task::ready(Ok(Default::default()));
2687 };
2688
2689 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2690 cx.foreground().spawn(async move {
2691                if lang_server.capabilities().code_action_provider.is_none() {
2692 return Ok(Default::default());
2693 }
2694
2695 Ok(lang_server
2696 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2697 text_document: lsp::TextDocumentIdentifier::new(
2698 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2699 ),
2700 range: lsp_range,
2701 work_done_progress_params: Default::default(),
2702 partial_result_params: Default::default(),
2703 context: lsp::CodeActionContext {
2704 diagnostics: relevant_diagnostics,
2705 only: Some(vec![
2706 lsp::CodeActionKind::QUICKFIX,
2707 lsp::CodeActionKind::REFACTOR,
2708 lsp::CodeActionKind::REFACTOR_EXTRACT,
2709 lsp::CodeActionKind::SOURCE,
2710 ]),
2711 },
2712 })
2713 .await?
2714 .unwrap_or_default()
2715 .into_iter()
2716 .filter_map(|entry| {
2717 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2718 Some(CodeAction {
2719 range: range.clone(),
2720 lsp_action,
2721 })
2722 } else {
2723 None
2724 }
2725 })
2726 .collect())
2727 })
2728 } else if let Some(project_id) = self.remote_id() {
2729 let rpc = self.client.clone();
2730 let version = buffer.version();
2731 cx.spawn_weak(|_, mut cx| async move {
2732 let response = rpc
2733 .request(proto::GetCodeActions {
2734 project_id,
2735 buffer_id,
2736 start: Some(language::proto::serialize_anchor(&range.start)),
2737 end: Some(language::proto::serialize_anchor(&range.end)),
2738 version: serialize_version(&version),
2739 })
2740 .await?;
2741
2742 buffer_handle
2743 .update(&mut cx, |buffer, _| {
2744 buffer.wait_for_version(deserialize_version(response.version))
2745 })
2746 .await;
2747
2748 response
2749 .actions
2750 .into_iter()
2751 .map(language::proto::deserialize_code_action)
2752 .collect()
2753 })
2754 } else {
2755 Task::ready(Ok(Default::default()))
2756 }
2757 }
2758
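    /// Applies a code action, re-resolving it with the language server when
    /// necessary, then either applies its workspace edit or executes its command,
    /// collecting the affected buffers' transactions into a `ProjectTransaction`.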
2759 pub fn apply_code_action(
2760 &self,
2761 buffer_handle: ModelHandle<Buffer>,
2762 mut action: CodeAction,
2763 push_to_history: bool,
2764 cx: &mut ModelContext<Self>,
2765 ) -> Task<Result<ProjectTransaction>> {
2766 if self.is_local() {
2767 let buffer = buffer_handle.read(cx);
2768 let (lsp_adapter, lang_server) =
2769 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2770 server.clone()
2771 } else {
2772 return Task::ready(Ok(Default::default()));
2773 };
2774 let range = action.range.to_point_utf16(buffer);
2775
2776 cx.spawn(|this, mut cx| async move {
2777 if let Some(lsp_range) = action
2778 .lsp_action
2779 .data
2780 .as_mut()
2781 .and_then(|d| d.get_mut("codeActionParams"))
2782 .and_then(|d| d.get_mut("range"))
2783 {
2784 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2785 action.lsp_action = lang_server
2786 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2787 .await?;
2788 } else {
2789 let actions = this
2790 .update(&mut cx, |this, cx| {
2791 this.code_actions(&buffer_handle, action.range, cx)
2792 })
2793 .await?;
2794 action.lsp_action = actions
2795 .into_iter()
2796 .find(|a| a.lsp_action.title == action.lsp_action.title)
2797 .ok_or_else(|| anyhow!("code action is outdated"))?
2798 .lsp_action;
2799 }
2800
2801 if let Some(edit) = action.lsp_action.edit {
2802 Self::deserialize_workspace_edit(
2803 this,
2804 edit,
2805 push_to_history,
2806 lsp_adapter,
2807 lang_server,
2808 &mut cx,
2809 )
2810 .await
2811 } else if let Some(command) = action.lsp_action.command {
2812 this.update(&mut cx, |this, _| {
2813 this.last_workspace_edits_by_language_server
2814 .remove(&lang_server.server_id());
2815 });
2816 lang_server
2817 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2818 command: command.command,
2819 arguments: command.arguments.unwrap_or_default(),
2820 ..Default::default()
2821 })
2822 .await?;
2823 Ok(this.update(&mut cx, |this, _| {
2824 this.last_workspace_edits_by_language_server
2825 .remove(&lang_server.server_id())
2826 .unwrap_or_default()
2827 }))
2828 } else {
2829 Ok(ProjectTransaction::default())
2830 }
2831 })
2832 } else if let Some(project_id) = self.remote_id() {
2833 let client = self.client.clone();
2834 let request = proto::ApplyCodeAction {
2835 project_id,
2836 buffer_id: buffer_handle.read(cx).remote_id(),
2837 action: Some(language::proto::serialize_code_action(&action)),
2838 };
2839 cx.spawn(|this, mut cx| async move {
2840 let response = client
2841 .request(request)
2842 .await?
2843 .transaction
2844 .ok_or_else(|| anyhow!("missing transaction"))?;
2845 this.update(&mut cx, |this, cx| {
2846 this.deserialize_project_transaction(response, push_to_history, cx)
2847 })
2848 .await
2849 })
2850 } else {
2851 Task::ready(Err(anyhow!("project does not have a remote id")))
2852 }
2853 }
2854
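    /// Applies an LSP workspace edit: resource operations (create, rename, delete)
    /// are performed through the project's `Fs`, text edits are applied to the
    /// corresponding buffers, and the buffers' transactions are returned as a
    /// `ProjectTransaction`.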
2855 async fn deserialize_workspace_edit(
2856 this: ModelHandle<Self>,
2857 edit: lsp::WorkspaceEdit,
2858 push_to_history: bool,
2859 lsp_adapter: Arc<dyn LspAdapter>,
2860 language_server: Arc<LanguageServer>,
2861 cx: &mut AsyncAppContext,
2862 ) -> Result<ProjectTransaction> {
2863 let fs = this.read_with(cx, |this, _| this.fs.clone());
2864 let mut operations = Vec::new();
2865 if let Some(document_changes) = edit.document_changes {
2866 match document_changes {
2867 lsp::DocumentChanges::Edits(edits) => {
2868 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2869 }
2870 lsp::DocumentChanges::Operations(ops) => operations = ops,
2871 }
2872 } else if let Some(changes) = edit.changes {
2873 operations.extend(changes.into_iter().map(|(uri, edits)| {
2874 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2875 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2876 uri,
2877 version: None,
2878 },
2879 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2880 })
2881 }));
2882 }
2883
2884 let mut project_transaction = ProjectTransaction::default();
2885 for operation in operations {
2886 match operation {
2887 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2888 let abs_path = op
2889 .uri
2890 .to_file_path()
2891 .map_err(|_| anyhow!("can't convert URI to path"))?;
2892
2893 if let Some(parent_path) = abs_path.parent() {
2894 fs.create_dir(parent_path).await?;
2895 }
2896 if abs_path.ends_with("/") {
2897 fs.create_dir(&abs_path).await?;
2898 } else {
2899 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2900 .await?;
2901 }
2902 }
2903 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2904 let source_abs_path = op
2905 .old_uri
2906 .to_file_path()
2907 .map_err(|_| anyhow!("can't convert URI to path"))?;
2908 let target_abs_path = op
2909 .new_uri
2910 .to_file_path()
2911 .map_err(|_| anyhow!("can't convert URI to path"))?;
2912 fs.rename(
2913 &source_abs_path,
2914 &target_abs_path,
2915 op.options.map(Into::into).unwrap_or_default(),
2916 )
2917 .await?;
2918 }
2919 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2920 let abs_path = op
2921 .uri
2922 .to_file_path()
2923 .map_err(|_| anyhow!("can't convert URI to path"))?;
2924 let options = op.options.map(Into::into).unwrap_or_default();
2925 if abs_path.ends_with("/") {
2926 fs.remove_dir(&abs_path, options).await?;
2927 } else {
2928 fs.remove_file(&abs_path, options).await?;
2929 }
2930 }
2931 lsp::DocumentChangeOperation::Edit(op) => {
2932 let buffer_to_edit = this
2933 .update(cx, |this, cx| {
2934 this.open_local_buffer_via_lsp(
2935 op.text_document.uri,
2936 lsp_adapter.clone(),
2937 language_server.clone(),
2938 cx,
2939 )
2940 })
2941 .await?;
2942
2943 let edits = this
2944 .update(cx, |this, cx| {
2945 let edits = op.edits.into_iter().map(|edit| match edit {
2946 lsp::OneOf::Left(edit) => edit,
2947 lsp::OneOf::Right(edit) => edit.text_edit,
2948 });
2949 this.edits_from_lsp(
2950 &buffer_to_edit,
2951 edits,
2952 op.text_document.version,
2953 cx,
2954 )
2955 })
2956 .await?;
2957
2958 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2959 buffer.finalize_last_transaction();
2960 buffer.start_transaction();
2961 for (range, text) in edits {
2962 buffer.edit([range], text, cx);
2963 }
2964 let transaction = if buffer.end_transaction(cx).is_some() {
2965 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2966 if !push_to_history {
2967 buffer.forget_transaction(transaction.id);
2968 }
2969 Some(transaction)
2970 } else {
2971 None
2972 };
2973
2974 transaction
2975 });
2976 if let Some(transaction) = transaction {
2977 project_transaction.0.insert(buffer_to_edit, transaction);
2978 }
2979 }
2980 }
2981 }
2982
2983 Ok(project_transaction)
2984 }
2985
2986 pub fn prepare_rename<T: ToPointUtf16>(
2987 &self,
2988 buffer: ModelHandle<Buffer>,
2989 position: T,
2990 cx: &mut ModelContext<Self>,
2991 ) -> Task<Result<Option<Range<Anchor>>>> {
2992 let position = position.to_point_utf16(buffer.read(cx));
2993 self.request_lsp(buffer, PrepareRename { position }, cx)
2994 }
2995
2996 pub fn perform_rename<T: ToPointUtf16>(
2997 &self,
2998 buffer: ModelHandle<Buffer>,
2999 position: T,
3000 new_name: String,
3001 push_to_history: bool,
3002 cx: &mut ModelContext<Self>,
3003 ) -> Task<Result<ProjectTransaction>> {
3004 let position = position.to_point_utf16(buffer.read(cx));
3005 self.request_lsp(
3006 buffer,
3007 PerformRename {
3008 position,
3009 new_name,
3010 push_to_history,
3011 },
3012 cx,
3013 )
3014 }
3015
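    /// Searches the project for `query`. Locally, visible worktrees are scanned on
    /// background threads for candidate files, matching files are opened as
    /// buffers, and each buffer is searched for match ranges; for remote projects
    /// the search is performed by the host.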
3016 pub fn search(
3017 &self,
3018 query: SearchQuery,
3019 cx: &mut ModelContext<Self>,
3020 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3021 if self.is_local() {
3022 let snapshots = self
3023 .visible_worktrees(cx)
3024 .filter_map(|tree| {
3025 let tree = tree.read(cx).as_local()?;
3026 Some(tree.snapshot())
3027 })
3028 .collect::<Vec<_>>();
3029
3030 let background = cx.background().clone();
3031 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3032 if path_count == 0 {
3033 return Task::ready(Ok(Default::default()));
3034 }
3035 let workers = background.num_cpus().min(path_count);
3036 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3037 cx.background()
3038 .spawn({
3039 let fs = self.fs.clone();
3040 let background = cx.background().clone();
3041 let query = query.clone();
3042 async move {
3043 let fs = &fs;
3044 let query = &query;
3045 let matching_paths_tx = &matching_paths_tx;
3046 let paths_per_worker = (path_count + workers - 1) / workers;
3047 let snapshots = &snapshots;
3048 background
3049 .scoped(|scope| {
3050 for worker_ix in 0..workers {
3051 let worker_start_ix = worker_ix * paths_per_worker;
3052 let worker_end_ix = worker_start_ix + paths_per_worker;
3053 scope.spawn(async move {
3054 let mut snapshot_start_ix = 0;
3055 let mut abs_path = PathBuf::new();
3056 for snapshot in snapshots {
3057 let snapshot_end_ix =
3058 snapshot_start_ix + snapshot.visible_file_count();
3059 if worker_end_ix <= snapshot_start_ix {
3060 break;
3061 } else if worker_start_ix > snapshot_end_ix {
3062 snapshot_start_ix = snapshot_end_ix;
3063 continue;
3064 } else {
3065 let start_in_snapshot = worker_start_ix
3066 .saturating_sub(snapshot_start_ix);
3067 let end_in_snapshot =
3068 cmp::min(worker_end_ix, snapshot_end_ix)
3069 - snapshot_start_ix;
3070
3071 for entry in snapshot
3072 .files(false, start_in_snapshot)
3073 .take(end_in_snapshot - start_in_snapshot)
3074 {
3075 if matching_paths_tx.is_closed() {
3076 break;
3077 }
3078
3079 abs_path.clear();
3080 abs_path.push(&snapshot.abs_path());
3081 abs_path.push(&entry.path);
3082 let matches = if let Some(file) =
3083 fs.open_sync(&abs_path).await.log_err()
3084 {
3085 query.detect(file).unwrap_or(false)
3086 } else {
3087 false
3088 };
3089
3090 if matches {
3091 let project_path =
3092 (snapshot.id(), entry.path.clone());
3093 if matching_paths_tx
3094 .send(project_path)
3095 .await
3096 .is_err()
3097 {
3098 break;
3099 }
3100 }
3101 }
3102
3103 snapshot_start_ix = snapshot_end_ix;
3104 }
3105 }
3106 });
3107 }
3108 })
3109 .await;
3110 }
3111 })
3112 .detach();
3113
3114 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3115 let open_buffers = self
3116 .opened_buffers
3117 .values()
3118 .filter_map(|b| b.upgrade(cx))
3119 .collect::<HashSet<_>>();
3120 cx.spawn(|this, cx| async move {
3121 for buffer in &open_buffers {
3122 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3123 buffers_tx.send((buffer.clone(), snapshot)).await?;
3124 }
3125
3126 let open_buffers = Rc::new(RefCell::new(open_buffers));
3127 while let Some(project_path) = matching_paths_rx.next().await {
3128 if buffers_tx.is_closed() {
3129 break;
3130 }
3131
3132 let this = this.clone();
3133 let open_buffers = open_buffers.clone();
3134 let buffers_tx = buffers_tx.clone();
3135 cx.spawn(|mut cx| async move {
3136 if let Some(buffer) = this
3137 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3138 .await
3139 .log_err()
3140 {
3141 if open_buffers.borrow_mut().insert(buffer.clone()) {
3142 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3143 buffers_tx.send((buffer, snapshot)).await?;
3144 }
3145 }
3146
3147 Ok::<_, anyhow::Error>(())
3148 })
3149 .detach();
3150 }
3151
3152 Ok::<_, anyhow::Error>(())
3153 })
3154 .detach_and_log_err(cx);
3155
3156 let background = cx.background().clone();
3157 cx.background().spawn(async move {
3158 let query = &query;
3159 let mut matched_buffers = Vec::new();
3160 for _ in 0..workers {
3161 matched_buffers.push(HashMap::default());
3162 }
3163 background
3164 .scoped(|scope| {
3165 for worker_matched_buffers in matched_buffers.iter_mut() {
3166 let mut buffers_rx = buffers_rx.clone();
3167 scope.spawn(async move {
3168 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3169 let buffer_matches = query
3170 .search(snapshot.as_rope())
3171 .await
3172 .iter()
3173 .map(|range| {
3174 snapshot.anchor_before(range.start)
3175 ..snapshot.anchor_after(range.end)
3176 })
3177 .collect::<Vec<_>>();
3178 if !buffer_matches.is_empty() {
3179 worker_matched_buffers
3180 .insert(buffer.clone(), buffer_matches);
3181 }
3182 }
3183 });
3184 }
3185 })
3186 .await;
3187 Ok(matched_buffers.into_iter().flatten().collect())
3188 })
3189 } else if let Some(project_id) = self.remote_id() {
3190 let request = self.client.request(query.to_proto(project_id));
3191 cx.spawn(|this, mut cx| async move {
3192 let response = request.await?;
3193 let mut result = HashMap::default();
3194 for location in response.locations {
3195 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3196 let target_buffer = this
3197 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3198 .await?;
3199 let start = location
3200 .start
3201 .and_then(deserialize_anchor)
3202 .ok_or_else(|| anyhow!("missing target start"))?;
3203 let end = location
3204 .end
3205 .and_then(deserialize_anchor)
3206 .ok_or_else(|| anyhow!("missing target end"))?;
3207 result
3208 .entry(target_buffer)
3209                    .or_default()
3210 .push(start..end)
3211 }
3212 Ok(result)
3213 })
3214 } else {
3215 Task::ready(Ok(Default::default()))
3216 }
3217 }
3218
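    /// Dispatches an `LspCommand` to the buffer's local language server (after
    /// checking its capabilities) or, for remote projects, to the host over RPC,
    /// converting the response into the command's result type.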
3219 fn request_lsp<R: LspCommand>(
3220 &self,
3221 buffer_handle: ModelHandle<Buffer>,
3222 request: R,
3223 cx: &mut ModelContext<Self>,
3224 ) -> Task<Result<R::Response>>
3225 where
3226 <R::LspRequest as lsp::request::Request>::Result: Send,
3227 {
3228 let buffer = buffer_handle.read(cx);
3229 if self.is_local() {
3230 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3231 if let Some((file, (_, language_server))) =
3232 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3233 {
3234 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3235 return cx.spawn(|this, cx| async move {
3236 if !request.check_capabilities(&language_server.capabilities()) {
3237 return Ok(Default::default());
3238 }
3239
3240 let response = language_server
3241 .request::<R::LspRequest>(lsp_params)
3242 .await
3243 .context("lsp request failed")?;
3244 request
3245 .response_from_lsp(response, this, buffer_handle, cx)
3246 .await
3247 });
3248 }
3249 } else if let Some(project_id) = self.remote_id() {
3250 let rpc = self.client.clone();
3251 let message = request.to_proto(project_id, buffer);
3252 return cx.spawn(|this, cx| async move {
3253 let response = rpc.request(message).await?;
3254 request
3255 .response_from_proto(response, this, buffer_handle, cx)
3256 .await
3257 });
3258 }
3259 Task::ready(Ok(Default::default()))
3260 }
3261
3262 pub fn find_or_create_local_worktree(
3263 &mut self,
3264 abs_path: impl AsRef<Path>,
3265 visible: bool,
3266 cx: &mut ModelContext<Self>,
3267 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3268 let abs_path = abs_path.as_ref();
3269 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3270 Task::ready(Ok((tree.clone(), relative_path.into())))
3271 } else {
3272 let worktree = self.create_local_worktree(abs_path, visible, cx);
3273 cx.foreground()
3274 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3275 }
3276 }
3277
3278 pub fn find_local_worktree(
3279 &self,
3280 abs_path: &Path,
3281 cx: &AppContext,
3282 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3283 for tree in self.worktrees(cx) {
3284 if let Some(relative_path) = tree
3285 .read(cx)
3286 .as_local()
3287 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3288 {
3289 return Some((tree.clone(), relative_path.into()));
3290 }
3291 }
3292 None
3293 }
3294
3295 pub fn is_shared(&self) -> bool {
3296 match &self.client_state {
3297 ProjectClientState::Local { is_shared, .. } => *is_shared,
3298 ProjectClientState::Remote { .. } => false,
3299 }
3300 }
3301
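    /// Creates a local worktree rooted at `abs_path`, reusing any in-flight task
    /// for the same path. When the project has a remote id, the new worktree is
    /// shared or registered with the server accordingly.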
3302 fn create_local_worktree(
3303 &mut self,
3304 abs_path: impl AsRef<Path>,
3305 visible: bool,
3306 cx: &mut ModelContext<Self>,
3307 ) -> Task<Result<ModelHandle<Worktree>>> {
3308 let fs = self.fs.clone();
3309 let client = self.client.clone();
3310 let next_entry_id = self.next_entry_id.clone();
3311 let path: Arc<Path> = abs_path.as_ref().into();
3312 let task = self
3313 .loading_local_worktrees
3314 .entry(path.clone())
3315 .or_insert_with(|| {
3316 cx.spawn(|project, mut cx| {
3317 async move {
3318 let worktree = Worktree::local(
3319 client.clone(),
3320 path.clone(),
3321 visible,
3322 fs,
3323 next_entry_id,
3324 &mut cx,
3325 )
3326 .await;
3327 project.update(&mut cx, |project, _| {
3328 project.loading_local_worktrees.remove(&path);
3329 });
3330 let worktree = worktree?;
3331
3332 let (remote_project_id, is_shared) =
3333 project.update(&mut cx, |project, cx| {
3334 project.add_worktree(&worktree, cx);
3335 (project.remote_id(), project.is_shared())
3336 });
3337
3338 if let Some(project_id) = remote_project_id {
3339 if is_shared {
3340 worktree
3341 .update(&mut cx, |worktree, cx| {
3342 worktree.as_local_mut().unwrap().share(project_id, cx)
3343 })
3344 .await?;
3345 } else {
3346 worktree
3347 .update(&mut cx, |worktree, cx| {
3348 worktree.as_local_mut().unwrap().register(project_id, cx)
3349 })
3350 .await?;
3351 }
3352 }
3353
3354 Ok(worktree)
3355 }
3356                    .map_err(Arc::new)
3357 })
3358 .shared()
3359 })
3360 .clone();
3361 cx.foreground().spawn(async move {
3362 match task.await {
3363 Ok(worktree) => Ok(worktree),
3364 Err(err) => Err(anyhow!("{}", err)),
3365 }
3366 })
3367 }
3368
3369 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3370 self.worktrees.retain(|worktree| {
3371 worktree
3372 .upgrade(cx)
3373 .map_or(false, |w| w.read(cx).id() != id)
3374 });
3375 cx.notify();
3376 }
3377
3378 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3379 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3380 if worktree.read(cx).is_local() {
3381 cx.subscribe(&worktree, |this, worktree, _, cx| {
3382 this.update_local_worktree_buffers(worktree, cx);
3383 })
3384 .detach();
3385 }
3386
3387 let push_strong_handle = {
3388 let worktree = worktree.read(cx);
3389 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3390 };
3391 if push_strong_handle {
3392 self.worktrees
3393 .push(WorktreeHandle::Strong(worktree.clone()));
3394 } else {
3395 cx.observe_release(&worktree, |this, _, cx| {
3396 this.worktrees
3397 .retain(|worktree| worktree.upgrade(cx).is_some());
3398 cx.notify();
3399 })
3400 .detach();
3401 self.worktrees
3402 .push(WorktreeHandle::Weak(worktree.downgrade()));
3403 }
3404 cx.notify();
3405 }
3406
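    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's `File` is refreshed, an `UpdateBufferFile` message is sent when
    /// the project has a remote id, and buffers whose paths changed are
    /// re-registered with their language servers.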
3407 fn update_local_worktree_buffers(
3408 &mut self,
3409 worktree_handle: ModelHandle<Worktree>,
3410 cx: &mut ModelContext<Self>,
3411 ) {
3412 let snapshot = worktree_handle.read(cx).snapshot();
3413 let mut buffers_to_delete = Vec::new();
3414 let mut renamed_buffers = Vec::new();
3415 for (buffer_id, buffer) in &self.opened_buffers {
3416 if let Some(buffer) = buffer.upgrade(cx) {
3417 buffer.update(cx, |buffer, cx| {
3418 if let Some(old_file) = File::from_dyn(buffer.file()) {
3419 if old_file.worktree != worktree_handle {
3420 return;
3421 }
3422
3423 let new_file = if let Some(entry) = old_file
3424 .entry_id
3425 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3426 {
3427 File {
3428 is_local: true,
3429 entry_id: Some(entry.id),
3430 mtime: entry.mtime,
3431 path: entry.path.clone(),
3432 worktree: worktree_handle.clone(),
3433 }
3434 } else if let Some(entry) =
3435 snapshot.entry_for_path(old_file.path().as_ref())
3436 {
3437 File {
3438 is_local: true,
3439 entry_id: Some(entry.id),
3440 mtime: entry.mtime,
3441 path: entry.path.clone(),
3442 worktree: worktree_handle.clone(),
3443 }
3444 } else {
3445 File {
3446 is_local: true,
3447 entry_id: None,
3448 path: old_file.path().clone(),
3449 mtime: old_file.mtime(),
3450 worktree: worktree_handle.clone(),
3451 }
3452 };
3453
3454 let old_path = old_file.abs_path(cx);
3455 if new_file.abs_path(cx) != old_path {
3456 renamed_buffers.push((cx.handle(), old_path));
3457 }
3458
3459 if let Some(project_id) = self.remote_id() {
3460 self.client
3461 .send(proto::UpdateBufferFile {
3462 project_id,
3463 buffer_id: *buffer_id as u64,
3464 file: Some(new_file.to_proto()),
3465 })
3466 .log_err();
3467 }
3468 buffer.file_updated(Box::new(new_file), cx).detach();
3469 }
3470 });
3471 } else {
3472 buffers_to_delete.push(*buffer_id);
3473 }
3474 }
3475
3476 for buffer_id in buffers_to_delete {
3477 self.opened_buffers.remove(&buffer_id);
3478 }
3479
3480 for (buffer, old_path) in renamed_buffers {
3481 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3482 self.assign_language_to_buffer(&buffer, cx);
3483 self.register_buffer_with_language_server(&buffer, cx);
3484 }
3485 }
3486
3487 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3488 let new_active_entry = entry.and_then(|project_path| {
3489 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3490 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3491 Some(entry.id)
3492 });
3493 if new_active_entry != self.active_entry {
3494 self.active_entry = new_active_entry;
3495 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3496 }
3497 }
3498
3499 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3500 self.language_server_statuses
3501 .values()
3502 .any(|status| status.pending_diagnostic_updates > 0)
3503 }
3504
3505 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3506 let mut summary = DiagnosticSummary::default();
3507 for (_, path_summary) in self.diagnostic_summaries(cx) {
3508 summary.error_count += path_summary.error_count;
3509 summary.warning_count += path_summary.warning_count;
3510 }
3511 summary
3512 }
3513
3514 pub fn diagnostic_summaries<'a>(
3515 &'a self,
3516 cx: &'a AppContext,
3517 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3518 self.worktrees(cx).flat_map(move |worktree| {
3519 let worktree = worktree.read(cx);
3520 let worktree_id = worktree.id();
3521 worktree
3522 .diagnostic_summaries()
3523 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3524 })
3525 }
3526
3527 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3528 if self
3529 .language_server_statuses
3530 .values()
3531 .map(|status| status.pending_diagnostic_updates)
3532 .sum::<isize>()
3533 == 1
3534 {
3535 cx.emit(Event::DiskBasedDiagnosticsStarted);
3536 }
3537 }
3538
3539 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3540 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3541 if self
3542 .language_server_statuses
3543 .values()
3544 .map(|status| status.pending_diagnostic_updates)
3545 .sum::<isize>()
3546 == 0
3547 {
3548 cx.emit(Event::DiskBasedDiagnosticsFinished);
3549 }
3550 }
3551
3552 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3553 self.active_entry
3554 }
3555
3556 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3557 self.worktree_for_id(path.worktree_id, cx)?
3558 .read(cx)
3559 .entry_for_path(&path.path)
3560 .map(|entry| entry.id)
3561 }
3562
3563 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3564 let worktree = self.worktree_for_entry(entry_id, cx)?;
3565 let worktree = worktree.read(cx);
3566 let worktree_id = worktree.id();
3567 let path = worktree.entry_for_id(entry_id)?.path.clone();
3568 Some(ProjectPath { worktree_id, path })
3569 }
3570
3571 // RPC message handlers
3572
3573 async fn handle_unshare_project(
3574 this: ModelHandle<Self>,
3575 _: TypedEnvelope<proto::UnshareProject>,
3576 _: Arc<Client>,
3577 mut cx: AsyncAppContext,
3578 ) -> Result<()> {
3579 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3580 Ok(())
3581 }
3582
3583 async fn handle_add_collaborator(
3584 this: ModelHandle<Self>,
3585 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3586 _: Arc<Client>,
3587 mut cx: AsyncAppContext,
3588 ) -> Result<()> {
3589 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3590 let collaborator = envelope
3591 .payload
3592 .collaborator
3593 .take()
3594 .ok_or_else(|| anyhow!("empty collaborator"))?;
3595
3596 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3597 this.update(&mut cx, |this, cx| {
3598 this.collaborators
3599 .insert(collaborator.peer_id, collaborator);
3600 cx.notify();
3601 });
3602
3603 Ok(())
3604 }
3605
3606 async fn handle_remove_collaborator(
3607 this: ModelHandle<Self>,
3608 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3609 _: Arc<Client>,
3610 mut cx: AsyncAppContext,
3611 ) -> Result<()> {
3612 this.update(&mut cx, |this, cx| {
3613 let peer_id = PeerId(envelope.payload.peer_id);
3614 let replica_id = this
3615 .collaborators
3616 .remove(&peer_id)
3617 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3618 .replica_id;
3619 for (_, buffer) in &this.opened_buffers {
3620 if let Some(buffer) = buffer.upgrade(cx) {
3621 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3622 }
3623 }
3624 cx.emit(Event::CollaboratorLeft(peer_id));
3625 cx.notify();
3626 Ok(())
3627 })
3628 }
3629
3630 async fn handle_register_worktree(
3631 this: ModelHandle<Self>,
3632 envelope: TypedEnvelope<proto::RegisterWorktree>,
3633 client: Arc<Client>,
3634 mut cx: AsyncAppContext,
3635 ) -> Result<()> {
3636 this.update(&mut cx, |this, cx| {
3637 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3638 let replica_id = this.replica_id();
3639 let worktree = proto::Worktree {
3640 id: envelope.payload.worktree_id,
3641 root_name: envelope.payload.root_name,
3642 entries: Default::default(),
3643 diagnostic_summaries: Default::default(),
3644 visible: envelope.payload.visible,
3645 };
3646 let (worktree, load_task) =
3647 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3648 this.add_worktree(&worktree, cx);
3649 load_task.detach();
3650 Ok(())
3651 })
3652 }
3653
3654 async fn handle_unregister_worktree(
3655 this: ModelHandle<Self>,
3656 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3657 _: Arc<Client>,
3658 mut cx: AsyncAppContext,
3659 ) -> Result<()> {
3660 this.update(&mut cx, |this, cx| {
3661 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3662 this.remove_worktree(worktree_id, cx);
3663 Ok(())
3664 })
3665 }
3666
3667 async fn handle_update_worktree(
3668 this: ModelHandle<Self>,
3669 envelope: TypedEnvelope<proto::UpdateWorktree>,
3670 _: Arc<Client>,
3671 mut cx: AsyncAppContext,
3672 ) -> Result<()> {
3673 this.update(&mut cx, |this, cx| {
3674 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3675 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3676 worktree.update(cx, |worktree, _| {
3677 let worktree = worktree.as_remote_mut().unwrap();
3678 worktree.update_from_remote(envelope)
3679 })?;
3680 }
3681 Ok(())
3682 })
3683 }
3684
3685 async fn handle_update_diagnostic_summary(
3686 this: ModelHandle<Self>,
3687 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3688 _: Arc<Client>,
3689 mut cx: AsyncAppContext,
3690 ) -> Result<()> {
3691 this.update(&mut cx, |this, cx| {
3692 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3693 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3694 if let Some(summary) = envelope.payload.summary {
3695 let project_path = ProjectPath {
3696 worktree_id,
3697 path: Path::new(&summary.path).into(),
3698 };
3699 worktree.update(cx, |worktree, _| {
3700 worktree
3701 .as_remote_mut()
3702 .unwrap()
3703 .update_diagnostic_summary(project_path.path.clone(), &summary);
3704 });
3705 cx.emit(Event::DiagnosticsUpdated(project_path));
3706 }
3707 }
3708 Ok(())
3709 })
3710 }
3711
3712 async fn handle_start_language_server(
3713 this: ModelHandle<Self>,
3714 envelope: TypedEnvelope<proto::StartLanguageServer>,
3715 _: Arc<Client>,
3716 mut cx: AsyncAppContext,
3717 ) -> Result<()> {
3718 let server = envelope
3719 .payload
3720 .server
3721 .ok_or_else(|| anyhow!("invalid server"))?;
3722 this.update(&mut cx, |this, cx| {
3723 this.language_server_statuses.insert(
3724 server.id as usize,
3725 LanguageServerStatus {
3726 name: server.name,
3727 pending_work: Default::default(),
3728 pending_diagnostic_updates: 0,
3729 },
3730 );
3731 cx.notify();
3732 });
3733 Ok(())
3734 }
3735
3736 async fn handle_update_language_server(
3737 this: ModelHandle<Self>,
3738 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3739 _: Arc<Client>,
3740 mut cx: AsyncAppContext,
3741 ) -> Result<()> {
3742 let language_server_id = envelope.payload.language_server_id as usize;
3743 match envelope
3744 .payload
3745 .variant
3746 .ok_or_else(|| anyhow!("invalid variant"))?
3747 {
3748 proto::update_language_server::Variant::WorkStart(payload) => {
3749 this.update(&mut cx, |this, cx| {
3750 this.on_lsp_work_start(language_server_id, payload.token, cx);
3751 })
3752 }
3753 proto::update_language_server::Variant::WorkProgress(payload) => {
3754 this.update(&mut cx, |this, cx| {
3755 this.on_lsp_work_progress(
3756 language_server_id,
3757 payload.token,
3758 LanguageServerProgress {
3759 message: payload.message,
3760 percentage: payload.percentage.map(|p| p as usize),
3761 last_update_at: Instant::now(),
3762 },
3763 cx,
3764 );
3765 })
3766 }
3767 proto::update_language_server::Variant::WorkEnd(payload) => {
3768 this.update(&mut cx, |this, cx| {
3769 this.on_lsp_work_end(language_server_id, payload.token, cx);
3770 })
3771 }
3772 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3773 this.update(&mut cx, |this, cx| {
3774 this.disk_based_diagnostics_started(cx);
3775 })
3776 }
3777 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3778 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3779 }
3780 }
3781
3782 Ok(())
3783 }
3784
3785 async fn handle_update_buffer(
3786 this: ModelHandle<Self>,
3787 envelope: TypedEnvelope<proto::UpdateBuffer>,
3788 _: Arc<Client>,
3789 mut cx: AsyncAppContext,
3790 ) -> Result<()> {
3791 this.update(&mut cx, |this, cx| {
3792 let payload = envelope.payload.clone();
3793 let buffer_id = payload.buffer_id;
3794 let ops = payload
3795 .operations
3796 .into_iter()
3797                .map(language::proto::deserialize_operation)
3798 .collect::<Result<Vec<_>, _>>()?;
3799 match this.opened_buffers.entry(buffer_id) {
3800 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3801 OpenBuffer::Strong(buffer) => {
3802 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3803 }
3804 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3805 OpenBuffer::Weak(_) => {}
3806 },
3807 hash_map::Entry::Vacant(e) => {
3808 e.insert(OpenBuffer::Loading(ops));
3809 }
3810 }
3811 Ok(())
3812 })
3813 }
3814
3815 async fn handle_update_buffer_file(
3816 this: ModelHandle<Self>,
3817 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3818 _: Arc<Client>,
3819 mut cx: AsyncAppContext,
3820 ) -> Result<()> {
3821 this.update(&mut cx, |this, cx| {
3822 let payload = envelope.payload.clone();
3823 let buffer_id = payload.buffer_id;
3824 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3825 let worktree = this
3826 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3827 .ok_or_else(|| anyhow!("no such worktree"))?;
3828 let file = File::from_proto(file, worktree.clone(), cx)?;
3829 let buffer = this
3830 .opened_buffers
3831 .get_mut(&buffer_id)
3832 .and_then(|b| b.upgrade(cx))
3833 .ok_or_else(|| anyhow!("no such buffer"))?;
3834 buffer.update(cx, |buffer, cx| {
3835 buffer.file_updated(Box::new(file), cx).detach();
3836 });
3837 Ok(())
3838 })
3839 }
3840
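/// Saves a buffer on behalf of a remote peer, first waiting for the buffer to
/// catch up to the version the peer requested.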
3841 async fn handle_save_buffer(
3842 this: ModelHandle<Self>,
3843 envelope: TypedEnvelope<proto::SaveBuffer>,
3844 _: Arc<Client>,
3845 mut cx: AsyncAppContext,
3846 ) -> Result<proto::BufferSaved> {
3847 let buffer_id = envelope.payload.buffer_id;
3848 let requested_version = deserialize_version(envelope.payload.version);
3849
3850 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3851 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3852 let buffer = this
3853 .opened_buffers
3854 .get(&buffer_id)
3855 .and_then(|buffer| buffer.upgrade(cx))
3856 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3857 Ok::<_, anyhow::Error>((project_id, buffer))
3858 })?;
3859 buffer
3860 .update(&mut cx, |buffer, _| {
3861 buffer.wait_for_version(requested_version)
3862 })
3863 .await;
3864
3865 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3866 Ok(proto::BufferSaved {
3867 project_id,
3868 buffer_id,
3869 version: serialize_version(&saved_version),
3870 mtime: Some(mtime.into()),
3871 })
3872 }
3873
3874 async fn handle_reload_buffers(
3875 this: ModelHandle<Self>,
3876 envelope: TypedEnvelope<proto::ReloadBuffers>,
3877 _: Arc<Client>,
3878 mut cx: AsyncAppContext,
3879 ) -> Result<proto::ReloadBuffersResponse> {
3880 let sender_id = envelope.original_sender_id()?;
3881 let reload = this.update(&mut cx, |this, cx| {
3882 let mut buffers = HashSet::default();
3883 for buffer_id in &envelope.payload.buffer_ids {
3884 buffers.insert(
3885 this.opened_buffers
3886 .get(buffer_id)
3887 .and_then(|buffer| buffer.upgrade(cx))
3888 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3889 );
3890 }
3891 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3892 })?;
3893
3894 let project_transaction = reload.await?;
3895 let project_transaction = this.update(&mut cx, |this, cx| {
3896 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3897 });
3898 Ok(proto::ReloadBuffersResponse {
3899 transaction: Some(project_transaction),
3900 })
3901 }
3902
3903 async fn handle_format_buffers(
3904 this: ModelHandle<Self>,
3905 envelope: TypedEnvelope<proto::FormatBuffers>,
3906 _: Arc<Client>,
3907 mut cx: AsyncAppContext,
3908 ) -> Result<proto::FormatBuffersResponse> {
3909 let sender_id = envelope.original_sender_id()?;
3910 let format = this.update(&mut cx, |this, cx| {
3911 let mut buffers = HashSet::default();
3912 for buffer_id in &envelope.payload.buffer_ids {
3913 buffers.insert(
3914 this.opened_buffers
3915 .get(buffer_id)
3916 .and_then(|buffer| buffer.upgrade(cx))
3917 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3918 );
3919 }
3920 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3921 })?;
3922
3923 let project_transaction = format.await?;
3924 let project_transaction = this.update(&mut cx, |this, cx| {
3925 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3926 });
3927 Ok(proto::FormatBuffersResponse {
3928 transaction: Some(project_transaction),
3929 })
3930 }
3931
3932 async fn handle_get_completions(
3933 this: ModelHandle<Self>,
3934 envelope: TypedEnvelope<proto::GetCompletions>,
3935 _: Arc<Client>,
3936 mut cx: AsyncAppContext,
3937 ) -> Result<proto::GetCompletionsResponse> {
3938 let position = envelope
3939 .payload
3940 .position
3941 .and_then(language::proto::deserialize_anchor)
3942 .ok_or_else(|| anyhow!("invalid position"))?;
3943 let version = deserialize_version(envelope.payload.version);
3944 let buffer = this.read_with(&cx, |this, cx| {
3945 this.opened_buffers
3946 .get(&envelope.payload.buffer_id)
3947 .and_then(|buffer| buffer.upgrade(cx))
3948 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3949 })?;
3950 buffer
3951 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3952 .await;
3953 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3954 let completions = this
3955 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3956 .await?;
3957
3958 Ok(proto::GetCompletionsResponse {
3959 completions: completions
3960 .iter()
3961 .map(language::proto::serialize_completion)
3962 .collect(),
3963 version: serialize_version(&version),
3964 })
3965 }
3966
3967 async fn handle_apply_additional_edits_for_completion(
3968 this: ModelHandle<Self>,
3969 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3970 _: Arc<Client>,
3971 mut cx: AsyncAppContext,
3972 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3973 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3974 let buffer = this
3975 .opened_buffers
3976 .get(&envelope.payload.buffer_id)
3977 .and_then(|buffer| buffer.upgrade(cx))
3978 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3979 let language = buffer.read(cx).language();
3980 let completion = language::proto::deserialize_completion(
3981 envelope
3982 .payload
3983 .completion
3984 .ok_or_else(|| anyhow!("invalid completion"))?,
3985 language,
3986 )?;
3987 Ok::<_, anyhow::Error>(
3988 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3989 )
3990 })?;
3991
3992 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3993 transaction: apply_additional_edits
3994 .await?
3995 .as_ref()
3996 .map(language::proto::serialize_transaction),
3997 })
3998 }
3999
4000 async fn handle_get_code_actions(
4001 this: ModelHandle<Self>,
4002 envelope: TypedEnvelope<proto::GetCodeActions>,
4003 _: Arc<Client>,
4004 mut cx: AsyncAppContext,
4005 ) -> Result<proto::GetCodeActionsResponse> {
4006 let start = envelope
4007 .payload
4008 .start
4009 .and_then(language::proto::deserialize_anchor)
4010 .ok_or_else(|| anyhow!("invalid start"))?;
4011 let end = envelope
4012 .payload
4013 .end
4014 .and_then(language::proto::deserialize_anchor)
4015 .ok_or_else(|| anyhow!("invalid end"))?;
4016 let buffer = this.update(&mut cx, |this, cx| {
4017 this.opened_buffers
4018 .get(&envelope.payload.buffer_id)
4019 .and_then(|buffer| buffer.upgrade(cx))
4020 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4021 })?;
4022 buffer
4023 .update(&mut cx, |buffer, _| {
4024 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4025 })
4026 .await;
4027
4028 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4029 let code_actions = this.update(&mut cx, |this, cx| {
4030 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4031 })?;
4032
4033 Ok(proto::GetCodeActionsResponse {
4034 actions: code_actions
4035 .await?
4036 .iter()
4037 .map(language::proto::serialize_code_action)
4038 .collect(),
4039 version: serialize_version(&version),
4040 })
4041 }
4042
4043 async fn handle_apply_code_action(
4044 this: ModelHandle<Self>,
4045 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4046 _: Arc<Client>,
4047 mut cx: AsyncAppContext,
4048 ) -> Result<proto::ApplyCodeActionResponse> {
4049 let sender_id = envelope.original_sender_id()?;
4050 let action = language::proto::deserialize_code_action(
4051 envelope
4052 .payload
4053 .action
4054 .ok_or_else(|| anyhow!("invalid action"))?,
4055 )?;
4056 let apply_code_action = this.update(&mut cx, |this, cx| {
4057 let buffer = this
4058 .opened_buffers
4059 .get(&envelope.payload.buffer_id)
4060 .and_then(|buffer| buffer.upgrade(cx))
4061 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4062 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4063 })?;
4064
4065 let project_transaction = apply_code_action.await?;
4066 let project_transaction = this.update(&mut cx, |this, cx| {
4067 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4068 });
4069 Ok(proto::ApplyCodeActionResponse {
4070 transaction: Some(project_transaction),
4071 })
4072 }
4073
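/// Generic handler for peer-forwarded requests backed by a language server
/// (`LspCommand`): deserializes the request, runs it via `request_lsp`, and
/// serializes the response for the requesting peer.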
4074 async fn handle_lsp_command<T: LspCommand>(
4075 this: ModelHandle<Self>,
4076 envelope: TypedEnvelope<T::ProtoRequest>,
4077 _: Arc<Client>,
4078 mut cx: AsyncAppContext,
4079 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4080 where
4081 <T::LspRequest as lsp::request::Request>::Result: Send,
4082 {
4083 let sender_id = envelope.original_sender_id()?;
4084 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4085 let buffer_handle = this.read_with(&cx, |this, _| {
4086 this.opened_buffers
4087 .get(&buffer_id)
4088 .and_then(|buffer| buffer.upgrade(&cx))
4089 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4090 })?;
4091 let request = T::from_proto(
4092 envelope.payload,
4093 this.clone(),
4094 buffer_handle.clone(),
4095 cx.clone(),
4096 )
4097 .await?;
4098 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4099 let response = this
4100 .update(&mut cx, |this, cx| {
4101 this.request_lsp(buffer_handle, request, cx)
4102 })
4103 .await?;
4104 this.update(&mut cx, |this, cx| {
4105 Ok(T::response_to_proto(
4106 response,
4107 this,
4108 sender_id,
4109 &buffer_version,
4110 cx,
4111 ))
4112 })
4113 }
4114
4115 async fn handle_get_project_symbols(
4116 this: ModelHandle<Self>,
4117 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4118 _: Arc<Client>,
4119 mut cx: AsyncAppContext,
4120 ) -> Result<proto::GetProjectSymbolsResponse> {
4121 let symbols = this
4122 .update(&mut cx, |this, cx| {
4123 this.symbols(&envelope.payload.query, cx)
4124 })
4125 .await?;
4126
4127 Ok(proto::GetProjectSymbolsResponse {
4128 symbols: symbols.iter().map(serialize_symbol).collect(),
4129 })
4130 }
4131
4132 async fn handle_search_project(
4133 this: ModelHandle<Self>,
4134 envelope: TypedEnvelope<proto::SearchProject>,
4135 _: Arc<Client>,
4136 mut cx: AsyncAppContext,
4137 ) -> Result<proto::SearchProjectResponse> {
4138 let peer_id = envelope.original_sender_id()?;
4139 let query = SearchQuery::from_proto(envelope.payload)?;
4140 let result = this
4141 .update(&mut cx, |this, cx| this.search(query, cx))
4142 .await?;
4143
4144 this.update(&mut cx, |this, cx| {
4145 let mut locations = Vec::new();
4146 for (buffer, ranges) in result {
4147 for range in ranges {
4148 let start = serialize_anchor(&range.start);
4149 let end = serialize_anchor(&range.end);
4150 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4151 locations.push(proto::Location {
4152 buffer: Some(buffer),
4153 start: Some(start),
4154 end: Some(end),
4155 });
4156 }
4157 }
4158 Ok(proto::SearchProjectResponse { locations })
4159 })
4160 }
4161
4162 async fn handle_open_buffer_for_symbol(
4163 this: ModelHandle<Self>,
4164 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4165 _: Arc<Client>,
4166 mut cx: AsyncAppContext,
4167 ) -> Result<proto::OpenBufferForSymbolResponse> {
4168 let peer_id = envelope.original_sender_id()?;
4169 let symbol = envelope
4170 .payload
4171 .symbol
4172 .ok_or_else(|| anyhow!("invalid symbol"))?;
4173 let symbol = this.read_with(&cx, |this, _| {
4174 let symbol = this.deserialize_symbol(symbol)?;
4175 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4176 if signature == symbol.signature {
4177 Ok(symbol)
4178 } else {
4179 Err(anyhow!("invalid symbol signature"))
4180 }
4181 })?;
4182 let buffer = this
4183 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4184 .await?;
4185
4186 Ok(proto::OpenBufferForSymbolResponse {
4187 buffer: Some(this.update(&mut cx, |this, cx| {
4188 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4189 })),
4190 })
4191 }
4192
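/// Hashes a symbol's worktree id and path together with this project's random
/// nonce, producing a signature that lets `handle_open_buffer_for_symbol`
/// verify that a symbol a peer sends back was originally produced by this
/// project.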
4193 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4194 let mut hasher = Sha256::new();
4195 hasher.update(worktree_id.to_proto().to_be_bytes());
4196 hasher.update(path.to_string_lossy().as_bytes());
4197 hasher.update(self.nonce.to_be_bytes());
4198 hasher.finalize().as_slice().try_into().unwrap()
4199 }
4200
4201 async fn handle_open_buffer_by_id(
4202 this: ModelHandle<Self>,
4203 envelope: TypedEnvelope<proto::OpenBufferById>,
4204 _: Arc<Client>,
4205 mut cx: AsyncAppContext,
4206 ) -> Result<proto::OpenBufferResponse> {
4207 let peer_id = envelope.original_sender_id()?;
4208 let buffer = this
4209 .update(&mut cx, |this, cx| {
4210 this.open_buffer_by_id(envelope.payload.id, cx)
4211 })
4212 .await?;
4213 this.update(&mut cx, |this, cx| {
4214 Ok(proto::OpenBufferResponse {
4215 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4216 })
4217 })
4218 }
4219
4220 async fn handle_open_buffer_by_path(
4221 this: ModelHandle<Self>,
4222 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4223 _: Arc<Client>,
4224 mut cx: AsyncAppContext,
4225 ) -> Result<proto::OpenBufferResponse> {
4226 let peer_id = envelope.original_sender_id()?;
4227 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4228 let open_buffer = this.update(&mut cx, |this, cx| {
4229 this.open_buffer(
4230 ProjectPath {
4231 worktree_id,
4232 path: PathBuf::from(envelope.payload.path).into(),
4233 },
4234 cx,
4235 )
4236 });
4237
4238 let buffer = open_buffer.await?;
4239 this.update(&mut cx, |this, cx| {
4240 Ok(proto::OpenBufferResponse {
4241 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4242 })
4243 })
4244 }
4245
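/// Converts a `ProjectTransaction` into its protobuf representation for a
/// given peer, serializing each affected buffer for that peer along the way.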
4246 fn serialize_project_transaction_for_peer(
4247 &mut self,
4248 project_transaction: ProjectTransaction,
4249 peer_id: PeerId,
4250 cx: &AppContext,
4251 ) -> proto::ProjectTransaction {
4252 let mut serialized_transaction = proto::ProjectTransaction {
4253 buffers: Default::default(),
4254 transactions: Default::default(),
4255 };
4256 for (buffer, transaction) in project_transaction.0 {
4257 serialized_transaction
4258 .buffers
4259 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4260 serialized_transaction
4261 .transactions
4262 .push(language::proto::serialize_transaction(&transaction));
4263 }
4264 serialized_transaction
4265 }
4266
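/// Reconstructs a `ProjectTransaction` received from a peer: resolves each
/// serialized buffer, waits for the transaction's edits to arrive, and
/// optionally pushes the transactions onto the buffers' undo histories.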
4267 fn deserialize_project_transaction(
4268 &mut self,
4269 message: proto::ProjectTransaction,
4270 push_to_history: bool,
4271 cx: &mut ModelContext<Self>,
4272 ) -> Task<Result<ProjectTransaction>> {
4273 cx.spawn(|this, mut cx| async move {
4274 let mut project_transaction = ProjectTransaction::default();
4275 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4276 let buffer = this
4277 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4278 .await?;
4279 let transaction = language::proto::deserialize_transaction(transaction)?;
4280 project_transaction.0.insert(buffer, transaction);
4281 }
4282
4283 for (buffer, transaction) in &project_transaction.0 {
4284 buffer
4285 .update(&mut cx, |buffer, _| {
4286 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4287 })
4288 .await;
4289
4290 if push_to_history {
4291 buffer.update(&mut cx, |buffer, _| {
4292 buffer.push_transaction(transaction.clone(), Instant::now());
4293 });
4294 }
4295 }
4296
4297 Ok(project_transaction)
4298 })
4299 }
4300
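/// Serializes a buffer for transmission to a peer. The first time a buffer is
/// sent to a given peer its full state is included; subsequent messages only
/// carry the buffer id.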
4301 fn serialize_buffer_for_peer(
4302 &mut self,
4303 buffer: &ModelHandle<Buffer>,
4304 peer_id: PeerId,
4305 cx: &AppContext,
4306 ) -> proto::Buffer {
4307 let buffer_id = buffer.read(cx).remote_id();
4308 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4309 if shared_buffers.insert(buffer_id) {
4310 proto::Buffer {
4311 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4312 }
4313 } else {
4314 proto::Buffer {
4315 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4316 }
4317 }
4318 }
4319
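/// Resolves a `proto::Buffer` into a local buffer handle: either waits for a
/// buffer with the given id to finish opening, or constructs a new buffer
/// from the serialized state and registers it with the project.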
4320 fn deserialize_buffer(
4321 &mut self,
4322 buffer: proto::Buffer,
4323 cx: &mut ModelContext<Self>,
4324 ) -> Task<Result<ModelHandle<Buffer>>> {
4325 let replica_id = self.replica_id();
4326
4327 let opened_buffer_tx = self.opened_buffer.0.clone();
4328 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4329 cx.spawn(|this, mut cx| async move {
4330 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4331 proto::buffer::Variant::Id(id) => {
4332 let buffer = loop {
4333 let buffer = this.read_with(&cx, |this, cx| {
4334 this.opened_buffers
4335 .get(&id)
4336 .and_then(|buffer| buffer.upgrade(cx))
4337 });
4338 if let Some(buffer) = buffer {
4339 break buffer;
4340 }
4341 opened_buffer_rx
4342 .next()
4343 .await
4344 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4345 };
4346 Ok(buffer)
4347 }
4348 proto::buffer::Variant::State(mut buffer) => {
4349 let mut buffer_worktree = None;
4350 let mut buffer_file = None;
4351 if let Some(file) = buffer.file.take() {
4352 this.read_with(&cx, |this, cx| {
4353 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4354 let worktree =
4355 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4356 anyhow!("no worktree found for id {}", file.worktree_id)
4357 })?;
4358 buffer_file =
4359 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4360 as Box<dyn language::File>);
4361 buffer_worktree = Some(worktree);
4362 Ok::<_, anyhow::Error>(())
4363 })?;
4364 }
4365
4366 let buffer = cx.add_model(|cx| {
4367 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4368 });
4369
4370 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4371
4372 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4373 Ok(buffer)
4374 }
4375 }
4376 })
4377 }
4378
4379 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4380 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4381 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4382 let start = serialized_symbol
4383 .start
4384 .ok_or_else(|| anyhow!("invalid start"))?;
4385 let end = serialized_symbol
4386 .end
4387 .ok_or_else(|| anyhow!("invalid end"))?;
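// The symbol kind is transmitted as a raw integer; reinterpret it as the LSP
// symbol kind enum, assuming the peer sent a valid value.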
4388 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4389 let path = PathBuf::from(serialized_symbol.path);
4390 let language = self.languages.select_language(&path);
4391 Ok(Symbol {
4392 source_worktree_id,
4393 worktree_id,
4394 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4395 label: language
4396 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4397 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4398 name: serialized_symbol.name,
4399 path,
4400 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4401 kind,
4402 signature: serialized_symbol
4403 .signature
4404 .try_into()
4405 .map_err(|_| anyhow!("invalid signature"))?,
4406 })
4407 }
4408
4409 async fn handle_buffer_saved(
4410 this: ModelHandle<Self>,
4411 envelope: TypedEnvelope<proto::BufferSaved>,
4412 _: Arc<Client>,
4413 mut cx: AsyncAppContext,
4414 ) -> Result<()> {
4415 let version = deserialize_version(envelope.payload.version);
4416 let mtime = envelope
4417 .payload
4418 .mtime
4419 .ok_or_else(|| anyhow!("missing mtime"))?
4420 .into();
4421
4422 this.update(&mut cx, |this, cx| {
4423 let buffer = this
4424 .opened_buffers
4425 .get(&envelope.payload.buffer_id)
4426 .and_then(|buffer| buffer.upgrade(cx));
4427 if let Some(buffer) = buffer {
4428 buffer.update(cx, |buffer, cx| {
4429 buffer.did_save(version, mtime, None, cx);
4430 });
4431 }
4432 Ok(())
4433 })
4434 }
4435
4436 async fn handle_buffer_reloaded(
4437 this: ModelHandle<Self>,
4438 envelope: TypedEnvelope<proto::BufferReloaded>,
4439 _: Arc<Client>,
4440 mut cx: AsyncAppContext,
4441 ) -> Result<()> {
4442 let payload = envelope.payload.clone();
4443 let version = deserialize_version(payload.version);
4444 let mtime = payload
4445 .mtime
4446 .ok_or_else(|| anyhow!("missing mtime"))?
4447 .into();
4448 this.update(&mut cx, |this, cx| {
4449 let buffer = this
4450 .opened_buffers
4451 .get(&payload.buffer_id)
4452 .and_then(|buffer| buffer.upgrade(cx));
4453 if let Some(buffer) = buffer {
4454 buffer.update(cx, |buffer, cx| {
4455 buffer.did_reload(version, mtime, cx);
4456 });
4457 }
4458 Ok(())
4459 })
4460 }
4461
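/// Fuzzy-matches `query` against the paths of all visible worktrees on the
/// background executor, returning up to `max_results` matches. The search can
/// be cancelled via `cancel_flag`.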
4462 pub fn match_paths<'a>(
4463 &self,
4464 query: &'a str,
4465 include_ignored: bool,
4466 smart_case: bool,
4467 max_results: usize,
4468 cancel_flag: &'a AtomicBool,
4469 cx: &AppContext,
4470 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4471 let worktrees = self
4472 .worktrees(cx)
4473 .filter(|worktree| worktree.read(cx).is_visible())
4474 .collect::<Vec<_>>();
4475 let include_root_name = worktrees.len() > 1;
4476 let candidate_sets = worktrees
4477 .into_iter()
4478 .map(|worktree| CandidateSet {
4479 snapshot: worktree.read(cx).snapshot(),
4480 include_ignored,
4481 include_root_name,
4482 })
4483 .collect::<Vec<_>>();
4484
4485 let background = cx.background().clone();
4486 async move {
4487 fuzzy::match_paths(
4488 candidate_sets.as_slice(),
4489 query,
4490 smart_case,
4491 max_results,
4492 cancel_flag,
4493 background,
4494 )
4495 .await
4496 }
4497 }
4498
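/// Converts LSP text edits into anchor-based buffer edits, resolving them
/// against the snapshot that was current at the given LSP document version
/// and diffing multi-line replacements so that anchors in unchanged regions
/// keep their positions.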
4499 fn edits_from_lsp(
4500 &mut self,
4501 buffer: &ModelHandle<Buffer>,
4502 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4503 version: Option<i32>,
4504 cx: &mut ModelContext<Self>,
4505 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4506 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4507 cx.background().spawn(async move {
4508 let snapshot = snapshot?;
4509 let mut lsp_edits = lsp_edits
4510 .into_iter()
4511 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4512 .peekable();
4513
4514 let mut edits = Vec::new();
4515 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4516 // Combine any LSP edits that are adjacent.
4517 //
4518 // Also, combine LSP edits that are separated from each other by only
4519 // a newline. This is important because for some code actions,
4520 // Rust-analyzer rewrites the entire buffer via a series of edits that
4521 // are separated by unchanged newline characters.
4522 //
4523 // In order for the diffing logic below to work properly, any edits that
4524 // cancel each other out must be combined into one.
4525 while let Some((next_range, next_text)) = lsp_edits.peek() {
4526 if next_range.start > range.end {
4527 if next_range.start.row > range.end.row + 1
4528 || next_range.start.column > 0
4529 || snapshot.clip_point_utf16(
4530 PointUtf16::new(range.end.row, u32::MAX),
4531 Bias::Left,
4532 ) > range.end
4533 {
4534 break;
4535 }
4536 new_text.push('\n');
4537 }
4538 range.end = next_range.end;
4539 new_text.push_str(&next_text);
4540 lsp_edits.next();
4541 }
4542
4543 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4544 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4545 {
4546 return Err(anyhow!("invalid edits received from language server"));
4547 }
4548
4549 // For multiline edits, perform a diff of the old and new text so that
4550 // we can identify the changes more precisely, preserving the locations
4551 // of any anchors positioned in the unchanged regions.
4552 if range.end.row > range.start.row {
4553 let mut offset = range.start.to_offset(&snapshot);
4554 let old_text = snapshot.text_for_range(range).collect::<String>();
4555
4556 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4557 let mut moved_since_edit = true;
4558 for change in diff.iter_all_changes() {
4559 let tag = change.tag();
4560 let value = change.value();
4561 match tag {
4562 ChangeTag::Equal => {
4563 offset += value.len();
4564 moved_since_edit = true;
4565 }
4566 ChangeTag::Delete => {
4567 let start = snapshot.anchor_after(offset);
4568 let end = snapshot.anchor_before(offset + value.len());
4569 if moved_since_edit {
4570 edits.push((start..end, String::new()));
4571 } else {
4572 edits.last_mut().unwrap().0.end = end;
4573 }
4574 offset += value.len();
4575 moved_since_edit = false;
4576 }
4577 ChangeTag::Insert => {
4578 if moved_since_edit {
4579 let anchor = snapshot.anchor_after(offset);
4580 edits.push((anchor.clone()..anchor, value.to_string()));
4581 } else {
4582 edits.last_mut().unwrap().1.push_str(value);
4583 }
4584 moved_since_edit = false;
4585 }
4586 }
4587 }
4588 } else if range.end == range.start {
4589 let anchor = snapshot.anchor_after(range.start);
4590 edits.push((anchor.clone()..anchor, new_text));
4591 } else {
4592 let edit_start = snapshot.anchor_after(range.start);
4593 let edit_end = snapshot.anchor_before(range.end);
4594 edits.push((edit_start..edit_end, new_text));
4595 }
4596 }
4597
4598 Ok(edits)
4599 })
4600 }
4601
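/// Returns the buffer snapshot corresponding to the given LSP document
/// version, discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
/// older than the requested one. Without a version, the buffer's current
/// snapshot is returned.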
4602 fn buffer_snapshot_for_lsp_version(
4603 &mut self,
4604 buffer: &ModelHandle<Buffer>,
4605 version: Option<i32>,
4606 cx: &AppContext,
4607 ) -> Result<TextBufferSnapshot> {
4608 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4609
4610 if let Some(version) = version {
4611 let buffer_id = buffer.read(cx).remote_id();
4612 let snapshots = self
4613 .buffer_snapshots
4614 .get_mut(&buffer_id)
4615 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4616 let mut found_snapshot = None;
4617 snapshots.retain(|(snapshot_version, snapshot)| {
4618 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4619 false
4620 } else {
4621 if *snapshot_version == version {
4622 found_snapshot = Some(snapshot.clone());
4623 }
4624 true
4625 }
4626 });
4627
4628 found_snapshot.ok_or_else(|| {
4629 anyhow!(
4630 "snapshot not found for buffer {} at version {}",
4631 buffer_id,
4632 version
4633 )
4634 })
4635 } else {
4636 Ok((buffer.read(cx)).text_snapshot())
4637 }
4638 }
4639
4640 fn language_server_for_buffer(
4641 &self,
4642 buffer: &Buffer,
4643 cx: &AppContext,
4644 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4645 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4646 let worktree_id = file.worktree_id(cx);
4647 self.language_servers
4648 .get(&(worktree_id, language.lsp_adapter()?.name()))
4649 } else {
4650 None
4651 }
4652 }
4653}
4654
4655impl WorktreeHandle {
4656 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4657 match self {
4658 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4659 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4660 }
4661 }
4662}
4663
4664impl OpenBuffer {
4665 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4666 match self {
4667 OpenBuffer::Strong(handle) => Some(handle.clone()),
4668 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4669 OpenBuffer::Loading(_) => None,
4670 }
4671 }
4672}
4673
4674struct CandidateSet {
4675 snapshot: Snapshot,
4676 include_ignored: bool,
4677 include_root_name: bool,
4678}
4679
4680impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4681 type Candidates = CandidateSetIter<'a>;
4682
4683 fn id(&self) -> usize {
4684 self.snapshot.id().to_usize()
4685 }
4686
4687 fn len(&self) -> usize {
4688 if self.include_ignored {
4689 self.snapshot.file_count()
4690 } else {
4691 self.snapshot.visible_file_count()
4692 }
4693 }
4694
4695 fn prefix(&self) -> Arc<str> {
4696 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4697 self.snapshot.root_name().into()
4698 } else if self.include_root_name {
4699 format!("{}/", self.snapshot.root_name()).into()
4700 } else {
4701 "".into()
4702 }
4703 }
4704
4705 fn candidates(&'a self, start: usize) -> Self::Candidates {
4706 CandidateSetIter {
4707 traversal: self.snapshot.files(self.include_ignored, start),
4708 }
4709 }
4710}
4711
4712struct CandidateSetIter<'a> {
4713 traversal: Traversal<'a>,
4714}
4715
4716impl<'a> Iterator for CandidateSetIter<'a> {
4717 type Item = PathMatchCandidate<'a>;
4718
4719 fn next(&mut self) -> Option<Self::Item> {
4720 self.traversal.next().map(|entry| {
4721 if let EntryKind::File(char_bag) = entry.kind {
4722 PathMatchCandidate {
4723 path: &entry.path,
4724 char_bag,
4725 }
4726 } else {
4727 unreachable!()
4728 }
4729 })
4730 }
4731}
4732
4733impl Entity for Project {
4734 type Event = Event;
4735
4736 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4737 match &self.client_state {
4738 ProjectClientState::Local { remote_id_rx, .. } => {
4739 if let Some(project_id) = *remote_id_rx.borrow() {
4740 self.client
4741 .send(proto::UnregisterProject { project_id })
4742 .log_err();
4743 }
4744 }
4745 ProjectClientState::Remote { remote_id, .. } => {
4746 self.client
4747 .send(proto::LeaveProject {
4748 project_id: *remote_id,
4749 })
4750 .log_err();
4751 }
4752 }
4753 }
4754
4755 fn app_will_quit(
4756 &mut self,
4757 _: &mut MutableAppContext,
4758 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4759 let shutdown_futures = self
4760 .language_servers
4761 .drain()
4762 .filter_map(|(_, (_, server))| server.shutdown())
4763 .collect::<Vec<_>>();
4764 Some(
4765 async move {
4766 futures::future::join_all(shutdown_futures).await;
4767 }
4768 .boxed(),
4769 )
4770 }
4771}
4772
4773impl Collaborator {
4774 fn from_proto(
4775 message: proto::Collaborator,
4776 user_store: &ModelHandle<UserStore>,
4777 cx: &mut AsyncAppContext,
4778 ) -> impl Future<Output = Result<Self>> {
4779 let user = user_store.update(cx, |user_store, cx| {
4780 user_store.fetch_user(message.user_id, cx)
4781 });
4782
4783 async move {
4784 Ok(Self {
4785 peer_id: PeerId(message.peer_id),
4786 user: user.await?,
4787 replica_id: message.replica_id as ReplicaId,
4788 })
4789 }
4790 }
4791}
4792
4793impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4794 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4795 Self {
4796 worktree_id,
4797 path: path.as_ref().into(),
4798 }
4799 }
4800}
4801
4802impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4803 fn from(options: lsp::CreateFileOptions) -> Self {
4804 Self {
4805 overwrite: options.overwrite.unwrap_or(false),
4806 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4807 }
4808 }
4809}
4810
4811impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4812 fn from(options: lsp::RenameFileOptions) -> Self {
4813 Self {
4814 overwrite: options.overwrite.unwrap_or(false),
4815 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4816 }
4817 }
4818}
4819
4820impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4821 fn from(options: lsp::DeleteFileOptions) -> Self {
4822 Self {
4823 recursive: options.recursive.unwrap_or(false),
4824 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4825 }
4826 }
4827}
4828
4829fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4830 proto::Symbol {
4831 source_worktree_id: symbol.source_worktree_id.to_proto(),
4832 worktree_id: symbol.worktree_id.to_proto(),
4833 language_server_name: symbol.language_server_name.0.to_string(),
4834 name: symbol.name.clone(),
4835 kind: unsafe { mem::transmute(symbol.kind) },
4836 path: symbol.path.to_string_lossy().to_string(),
4837 start: Some(proto::Point {
4838 row: symbol.range.start.row,
4839 column: symbol.range.start.column,
4840 }),
4841 end: Some(proto::Point {
4842 row: symbol.range.end.row,
4843 column: symbol.range.end.column,
4844 }),
4845 signature: symbol.signature.to_vec(),
4846 }
4847}
4848
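/// Computes the path of `path` relative to `base`, emitting `..` components
/// where the two paths diverge.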
4849fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4850 let mut path_components = path.components();
4851 let mut base_components = base.components();
4852 let mut components: Vec<Component> = Vec::new();
4853 loop {
4854 match (path_components.next(), base_components.next()) {
4855 (None, None) => break,
4856 (Some(a), None) => {
4857 components.push(a);
4858 components.extend(path_components.by_ref());
4859 break;
4860 }
4861 (None, _) => components.push(Component::ParentDir),
4862 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4863 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4864 (Some(a), Some(_)) => {
4865 components.push(Component::ParentDir);
4866 for _ in base_components {
4867 components.push(Component::ParentDir);
4868 }
4869 components.push(a);
4870 components.extend(path_components.by_ref());
4871 break;
4872 }
4873 }
4874 }
4875 components.iter().map(|c| c.as_os_str()).collect()
4876}
4877
4878impl Item for Buffer {
4879 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4880 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4881 }
4882}
4883
4884#[cfg(test)]
4885mod tests {
4886 use super::{Event, *};
4887 use fs::RealFs;
4888 use futures::{future, StreamExt};
4889 use gpui::test::subscribe;
4890 use language::{
4891 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4892 ToPoint,
4893 };
4894 use lsp::Url;
4895 use serde_json::json;
4896 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
4897 use unindent::Unindent as _;
4898 use util::{assert_set_eq, test::temp_tree};
4899 use worktree::WorktreeHandle as _;
4900
4901 #[gpui::test]
4902 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4903 let dir = temp_tree(json!({
4904 "root": {
4905 "apple": "",
4906 "banana": {
4907 "carrot": {
4908 "date": "",
4909 "endive": "",
4910 }
4911 },
4912 "fennel": {
4913 "grape": "",
4914 }
4915 }
4916 }));
4917
4918 let root_link_path = dir.path().join("root_link");
4919 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4920 unix::fs::symlink(
4921 &dir.path().join("root/fennel"),
4922 &dir.path().join("root/finnochio"),
4923 )
4924 .unwrap();
4925
4926 let project = Project::test(Arc::new(RealFs), cx);
4927
4928 let (tree, _) = project
4929 .update(cx, |project, cx| {
4930 project.find_or_create_local_worktree(&root_link_path, true, cx)
4931 })
4932 .await
4933 .unwrap();
4934
4935 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4936 .await;
4937 cx.read(|cx| {
4938 let tree = tree.read(cx);
4939 assert_eq!(tree.file_count(), 5);
4940 assert_eq!(
4941 tree.inode_for_path("fennel/grape"),
4942 tree.inode_for_path("finnochio/grape")
4943 );
4944 });
4945
4946 let cancel_flag = Default::default();
4947 let results = project
4948 .read_with(cx, |project, cx| {
4949 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4950 })
4951 .await;
4952 assert_eq!(
4953 results
4954 .into_iter()
4955 .map(|result| result.path)
4956 .collect::<Vec<Arc<Path>>>(),
4957 vec![
4958 PathBuf::from("banana/carrot/date").into(),
4959 PathBuf::from("banana/carrot/endive").into(),
4960 ]
4961 );
4962 }
4963
4964 #[gpui::test]
4965 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4966 cx.foreground().forbid_parking();
4967
4968 let mut rust_language = Language::new(
4969 LanguageConfig {
4970 name: "Rust".into(),
4971 path_suffixes: vec!["rs".to_string()],
4972 ..Default::default()
4973 },
4974 Some(tree_sitter_rust::language()),
4975 );
4976 let mut json_language = Language::new(
4977 LanguageConfig {
4978 name: "JSON".into(),
4979 path_suffixes: vec!["json".to_string()],
4980 ..Default::default()
4981 },
4982 None,
4983 );
4984 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4985 name: "the-rust-language-server",
4986 capabilities: lsp::ServerCapabilities {
4987 completion_provider: Some(lsp::CompletionOptions {
4988 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4989 ..Default::default()
4990 }),
4991 ..Default::default()
4992 },
4993 ..Default::default()
4994 });
4995 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4996 name: "the-json-language-server",
4997 capabilities: lsp::ServerCapabilities {
4998 completion_provider: Some(lsp::CompletionOptions {
4999 trigger_characters: Some(vec![":".to_string()]),
5000 ..Default::default()
5001 }),
5002 ..Default::default()
5003 },
5004 ..Default::default()
5005 });
5006
5007 let fs = FakeFs::new(cx.background());
5008 fs.insert_tree(
5009 "/the-root",
5010 json!({
5011 "test.rs": "const A: i32 = 1;",
5012 "test2.rs": "",
5013 "Cargo.toml": "a = 1",
5014 "package.json": "{\"a\": 1}",
5015 }),
5016 )
5017 .await;
5018
5019 let project = Project::test(fs.clone(), cx);
5020 project.update(cx, |project, _| {
5021 project.languages.add(Arc::new(rust_language));
5022 project.languages.add(Arc::new(json_language));
5023 });
5024
5025 let worktree_id = project
5026 .update(cx, |project, cx| {
5027 project.find_or_create_local_worktree("/the-root", true, cx)
5028 })
5029 .await
5030 .unwrap()
5031 .0
5032 .read_with(cx, |tree, _| tree.id());
5033
5034 // Open a buffer without an associated language server.
5035 let toml_buffer = project
5036 .update(cx, |project, cx| {
5037 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5038 })
5039 .await
5040 .unwrap();
5041
5042 // Open a buffer with an associated language server.
5043 let rust_buffer = project
5044 .update(cx, |project, cx| {
5045 project.open_buffer((worktree_id, "test.rs"), cx)
5046 })
5047 .await
5048 .unwrap();
5049
5050 // A server is started up, and it is notified about Rust files.
5051 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5052 assert_eq!(
5053 fake_rust_server
5054 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5055 .await
5056 .text_document,
5057 lsp::TextDocumentItem {
5058 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5059 version: 0,
5060 text: "const A: i32 = 1;".to_string(),
5061 language_id: Default::default()
5062 }
5063 );
5064
5065 // The buffer is configured based on the language server's capabilities.
5066 rust_buffer.read_with(cx, |buffer, _| {
5067 assert_eq!(
5068 buffer.completion_triggers(),
5069 &[".".to_string(), "::".to_string()]
5070 );
5071 });
5072 toml_buffer.read_with(cx, |buffer, _| {
5073 assert!(buffer.completion_triggers().is_empty());
5074 });
5075
5076 // Edit a buffer. The changes are reported to the language server.
5077 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5078 assert_eq!(
5079 fake_rust_server
5080 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5081 .await
5082 .text_document,
5083 lsp::VersionedTextDocumentIdentifier::new(
5084 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5085 1
5086 )
5087 );
5088
5089 // Open a third buffer with a different associated language server.
5090 let json_buffer = project
5091 .update(cx, |project, cx| {
5092 project.open_buffer((worktree_id, "package.json"), cx)
5093 })
5094 .await
5095 .unwrap();
5096
5097 // A JSON language server is started up, and it is only notified about the JSON buffer.
5098 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5099 assert_eq!(
5100 fake_json_server
5101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5102 .await
5103 .text_document,
5104 lsp::TextDocumentItem {
5105 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5106 version: 0,
5107 text: "{\"a\": 1}".to_string(),
5108 language_id: Default::default()
5109 }
5110 );
5111
5112 // This buffer is configured based on the second language server's
5113 // capabilities.
5114 json_buffer.read_with(cx, |buffer, _| {
5115 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5116 });
5117
5118 // When opening another buffer whose language server is already running,
5119 // it is also configured based on the existing language server's capabilities.
5120 let rust_buffer2 = project
5121 .update(cx, |project, cx| {
5122 project.open_buffer((worktree_id, "test2.rs"), cx)
5123 })
5124 .await
5125 .unwrap();
5126 rust_buffer2.read_with(cx, |buffer, _| {
5127 assert_eq!(
5128 buffer.completion_triggers(),
5129 &[".".to_string(), "::".to_string()]
5130 );
5131 });
5132
5133 // Changes are reported only to servers matching the buffer's language.
5134 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5135 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5136 assert_eq!(
5137 fake_rust_server
5138 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5139 .await
5140 .text_document,
5141 lsp::VersionedTextDocumentIdentifier::new(
5142 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5143 1
5144 )
5145 );
5146
5147 // Save notifications are reported to all servers.
5148 toml_buffer
5149 .update(cx, |buffer, cx| buffer.save(cx))
5150 .await
5151 .unwrap();
5152 assert_eq!(
5153 fake_rust_server
5154 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5155 .await
5156 .text_document,
5157 lsp::TextDocumentIdentifier::new(
5158 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5159 )
5160 );
5161 assert_eq!(
5162 fake_json_server
5163 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5164 .await
5165 .text_document,
5166 lsp::TextDocumentIdentifier::new(
5167 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5168 )
5169 );
5170
5171 // Renames are reported only to servers matching the buffer's language.
5172 fs.rename(
5173 Path::new("/the-root/test2.rs"),
5174 Path::new("/the-root/test3.rs"),
5175 Default::default(),
5176 )
5177 .await
5178 .unwrap();
5179 assert_eq!(
5180 fake_rust_server
5181 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5182 .await
5183 .text_document,
5184 lsp::TextDocumentIdentifier::new(
5185 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5186 ),
5187 );
5188 assert_eq!(
5189 fake_rust_server
5190 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5191 .await
5192 .text_document,
5193 lsp::TextDocumentItem {
5194 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5195 version: 0,
5196 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5197 language_id: Default::default()
5198 },
5199 );
5200
5201 rust_buffer2.update(cx, |buffer, cx| {
5202 buffer.update_diagnostics(
5203 DiagnosticSet::from_sorted_entries(
5204 vec![DiagnosticEntry {
5205 diagnostic: Default::default(),
5206 range: Anchor::MIN..Anchor::MAX,
5207 }],
5208 &buffer.snapshot(),
5209 ),
5210 cx,
5211 );
5212 assert_eq!(
5213 buffer
5214 .snapshot()
5215 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5216 .count(),
5217 1
5218 );
5219 });
5220
5221 // When the rename changes the extension of the file, the buffer gets closed on the old
5222 // language server and gets opened on the new one.
5223 fs.rename(
5224 Path::new("/the-root/test3.rs"),
5225 Path::new("/the-root/test3.json"),
5226 Default::default(),
5227 )
5228 .await
5229 .unwrap();
5230 assert_eq!(
5231 fake_rust_server
5232 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5233 .await
5234 .text_document,
5235 lsp::TextDocumentIdentifier::new(
5236 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5237 ),
5238 );
5239 assert_eq!(
5240 fake_json_server
5241 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5242 .await
5243 .text_document,
5244 lsp::TextDocumentItem {
5245 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5246 version: 0,
5247 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5248 language_id: Default::default()
5249 },
5250 );
5251 // We clear the diagnostics, since the language has changed.
5252 rust_buffer2.read_with(cx, |buffer, _| {
5253 assert_eq!(
5254 buffer
5255 .snapshot()
5256 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5257 .count(),
5258 0
5259 );
5260 });
5261
5262 // The renamed file's version resets after changing language servers.
5263 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5264 assert_eq!(
5265 fake_json_server
5266 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5267 .await
5268 .text_document,
5269 lsp::VersionedTextDocumentIdentifier::new(
5270 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5271 1
5272 )
5273 );
5274
5275 // Restart language servers
5276 project.update(cx, |project, cx| {
5277 project.restart_language_servers_for_buffers(
5278 vec![rust_buffer.clone(), json_buffer.clone()],
5279 cx,
5280 );
5281 });
5282
5283 let mut rust_shutdown_requests = fake_rust_server
5284 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5285 let mut json_shutdown_requests = fake_json_server
5286 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5287 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5288
5289 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5290 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5291
5292 // Ensure the Rust document is reopened in the new Rust language server.
5293 assert_eq!(
5294 fake_rust_server
5295 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5296 .await
5297 .text_document,
5298 lsp::TextDocumentItem {
5299 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5300 version: 1,
5301 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5302 language_id: Default::default()
5303 }
5304 );
5305
5306 // Ensure the JSON documents are reopened in the new JSON language server.
5307 assert_set_eq!(
5308 [
5309 fake_json_server
5310 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5311 .await
5312 .text_document,
5313 fake_json_server
5314 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5315 .await
5316 .text_document,
5317 ],
5318 [
5319 lsp::TextDocumentItem {
5320 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5321 version: 0,
5322 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5323 language_id: Default::default()
5324 },
5325 lsp::TextDocumentItem {
5326 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5327 version: 1,
5328 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5329 language_id: Default::default()
5330 }
5331 ]
5332 );
5333
5334 // Close notifications are reported only to servers matching the buffer's language.
5335 cx.update(|_| drop(json_buffer));
5336 let close_message = lsp::DidCloseTextDocumentParams {
5337 text_document: lsp::TextDocumentIdentifier::new(
5338 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5339 ),
5340 };
5341 assert_eq!(
5342 fake_json_server
5343 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5344 .await,
5345 close_message,
5346 );
5347 }
5348
5349 #[gpui::test]
5350 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5351 cx.foreground().forbid_parking();
5352
5353 let fs = FakeFs::new(cx.background());
5354 fs.insert_tree(
5355 "/dir",
5356 json!({
5357 "a.rs": "let a = 1;",
5358 "b.rs": "let b = 2;"
5359 }),
5360 )
5361 .await;
5362
5363 let project = Project::test(fs, cx);
5364 let worktree_a_id = project
5365 .update(cx, |project, cx| {
5366 project.find_or_create_local_worktree("/dir/a.rs", true, cx)
5367 })
5368 .await
5369 .unwrap()
5370 .0
5371 .read_with(cx, |tree, _| tree.id());
5372 let worktree_b_id = project
5373 .update(cx, |project, cx| {
5374 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5375 })
5376 .await
5377 .unwrap()
5378 .0
5379 .read_with(cx, |tree, _| tree.id());
5380
5381 let buffer_a = project
5382 .update(cx, |project, cx| {
5383 project.open_buffer((worktree_a_id, ""), cx)
5384 })
5385 .await
5386 .unwrap();
5387 let buffer_b = project
5388 .update(cx, |project, cx| {
5389 project.open_buffer((worktree_b_id, ""), cx)
5390 })
5391 .await
5392 .unwrap();
5393
5394 project.update(cx, |project, cx| {
5395 project
5396 .update_diagnostics(
5397 lsp::PublishDiagnosticsParams {
5398 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5399 version: None,
5400 diagnostics: vec![lsp::Diagnostic {
5401 range: lsp::Range::new(
5402 lsp::Position::new(0, 4),
5403 lsp::Position::new(0, 5),
5404 ),
5405 severity: Some(lsp::DiagnosticSeverity::ERROR),
5406 message: "error 1".to_string(),
5407 ..Default::default()
5408 }],
5409 },
5410 &[],
5411 cx,
5412 )
5413 .unwrap();
5414 project
5415 .update_diagnostics(
5416 lsp::PublishDiagnosticsParams {
5417 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5418 version: None,
5419 diagnostics: vec![lsp::Diagnostic {
5420 range: lsp::Range::new(
5421 lsp::Position::new(0, 4),
5422 lsp::Position::new(0, 5),
5423 ),
5424 severity: Some(lsp::DiagnosticSeverity::WARNING),
5425 message: "error 2".to_string(),
5426 ..Default::default()
5427 }],
5428 },
5429 &[],
5430 cx,
5431 )
5432 .unwrap();
5433 });
5434
5435 buffer_a.read_with(cx, |buffer, _| {
5436 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5437 assert_eq!(
5438 chunks
5439 .iter()
5440 .map(|(s, d)| (s.as_str(), *d))
5441 .collect::<Vec<_>>(),
5442 &[
5443 ("let ", None),
5444 ("a", Some(DiagnosticSeverity::ERROR)),
5445 (" = 1;", None),
5446 ]
5447 );
5448 });
5449 buffer_b.read_with(cx, |buffer, _| {
5450 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5451 assert_eq!(
5452 chunks
5453 .iter()
5454 .map(|(s, d)| (s.as_str(), *d))
5455 .collect::<Vec<_>>(),
5456 &[
5457 ("let ", None),
5458 ("b", Some(DiagnosticSeverity::WARNING)),
5459 (" = 2;", None),
5460 ]
5461 );
5462 });
5463 }
5464
5465 #[gpui::test]
5466 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5467 cx.foreground().forbid_parking();
5468
5469 let progress_token = "the-progress-token";
5470 let mut language = Language::new(
5471 LanguageConfig {
5472 name: "Rust".into(),
5473 path_suffixes: vec!["rs".to_string()],
5474 ..Default::default()
5475 },
5476 Some(tree_sitter_rust::language()),
5477 );
5478 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5479 disk_based_diagnostics_progress_token: Some(progress_token),
5480 disk_based_diagnostics_sources: &["disk"],
5481 ..Default::default()
5482 });
5483
5484 let fs = FakeFs::new(cx.background());
5485 fs.insert_tree(
5486 "/dir",
5487 json!({
5488 "a.rs": "fn a() { A }",
5489 "b.rs": "const y: i32 = 1",
5490 }),
5491 )
5492 .await;
5493
5494 let project = Project::test(fs, cx);
5495 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5496
5497 let (tree, _) = project
5498 .update(cx, |project, cx| {
5499 project.find_or_create_local_worktree("/dir", true, cx)
5500 })
5501 .await
5502 .unwrap();
5503 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5504
5505 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5506 .await;
5507
5508 // Cause the worktree to start the fake language server.
5509 let _buffer = project
5510 .update(cx, |project, cx| {
5511 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5512 })
5513 .await
5514 .unwrap();
5515
5516 let mut events = subscribe(&project, cx);
5517
5518 let mut fake_server = fake_servers.next().await.unwrap();
5519 fake_server.start_progress(progress_token).await;
5520 assert_eq!(
5521 events.next().await.unwrap(),
5522 Event::DiskBasedDiagnosticsStarted
5523 );
5524
5525 fake_server.start_progress(progress_token).await;
5526 fake_server.end_progress(progress_token).await;
5527 fake_server.start_progress(progress_token).await;
5528
5529 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5530 lsp::PublishDiagnosticsParams {
5531 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5532 version: None,
5533 diagnostics: vec![lsp::Diagnostic {
5534 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5535 severity: Some(lsp::DiagnosticSeverity::ERROR),
5536 message: "undefined variable 'A'".to_string(),
5537 ..Default::default()
5538 }],
5539 },
5540 );
5541 assert_eq!(
5542 events.next().await.unwrap(),
5543 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5544 );
5545
5546 fake_server.end_progress(progress_token).await;
5547 fake_server.end_progress(progress_token).await;
5548 assert_eq!(
5549 events.next().await.unwrap(),
5550 Event::DiskBasedDiagnosticsUpdated
5551 );
5552 assert_eq!(
5553 events.next().await.unwrap(),
5554 Event::DiskBasedDiagnosticsFinished
5555 );
5556
5557 let buffer = project
5558 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5559 .await
5560 .unwrap();
5561
5562 buffer.read_with(cx, |buffer, _| {
5563 let snapshot = buffer.snapshot();
5564 let diagnostics = snapshot
5565 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5566 .collect::<Vec<_>>();
5567 assert_eq!(
5568 diagnostics,
5569 &[DiagnosticEntry {
5570 range: Point::new(0, 9)..Point::new(0, 10),
5571 diagnostic: Diagnostic {
5572 severity: lsp::DiagnosticSeverity::ERROR,
5573 message: "undefined variable 'A'".to_string(),
5574 group_id: 0,
5575 is_primary: true,
5576 ..Default::default()
5577 }
5578 }]
5579 )
5580 });
5581
5582 // Ensure publishing empty diagnostics twice only results in one update event.
5583 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5584 lsp::PublishDiagnosticsParams {
5585 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5586 version: None,
5587 diagnostics: Default::default(),
5588 },
5589 );
5590 assert_eq!(
5591 events.next().await.unwrap(),
5592 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5593 );
5594
5595 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5596 lsp::PublishDiagnosticsParams {
5597 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5598 version: None,
5599 diagnostics: Default::default(),
5600 },
5601 );
5602 cx.foreground().run_until_parked();
5603 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5604 }
5605
5606 #[gpui::test]
5607 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5608 cx.foreground().forbid_parking();
5609
5610 let progress_token = "the-progress-token";
5611 let mut language = Language::new(
5612 LanguageConfig {
5613 path_suffixes: vec!["rs".to_string()],
5614 ..Default::default()
5615 },
5616 None,
5617 );
5618 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5619 disk_based_diagnostics_sources: &["disk"],
5620 disk_based_diagnostics_progress_token: Some(progress_token),
5621 ..Default::default()
5622 });
5623
5624 let fs = FakeFs::new(cx.background());
5625 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5626
5627 let project = Project::test(fs, cx);
5628 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5629
5630 let worktree_id = project
5631 .update(cx, |project, cx| {
5632 project.find_or_create_local_worktree("/dir", true, cx)
5633 })
5634 .await
5635 .unwrap()
5636 .0
5637 .read_with(cx, |tree, _| tree.id());
5638
5639 let buffer = project
5640 .update(cx, |project, cx| {
5641 project.open_buffer((worktree_id, "a.rs"), cx)
5642 })
5643 .await
5644 .unwrap();
5645
5646 // Simulate diagnostics starting to update.
5647 let mut fake_server = fake_servers.next().await.unwrap();
5648 fake_server.start_progress(progress_token).await;
5649
5650 // Restart the server before the diagnostics finish updating.
5651 project.update(cx, |project, cx| {
5652 project.restart_language_servers_for_buffers([buffer], cx);
5653 });
5654 let mut events = subscribe(&project, cx);
5655
5656 // Simulate the newly started server sending more diagnostics.
5657 let mut fake_server = fake_servers.next().await.unwrap();
5658 fake_server.start_progress(progress_token).await;
5659 assert_eq!(
5660 events.next().await.unwrap(),
5661 Event::DiskBasedDiagnosticsStarted
5662 );
5663
5664 // All diagnostics are considered done, despite the old server's diagnostic
5665 // task never completing.
5666 fake_server.end_progress(progress_token).await;
5667 assert_eq!(
5668 events.next().await.unwrap(),
5669 Event::DiskBasedDiagnosticsUpdated
5670 );
5671 assert_eq!(
5672 events.next().await.unwrap(),
5673 Event::DiskBasedDiagnosticsFinished
5674 );
5675 project.read_with(cx, |project, _| {
5676 assert!(!project.is_running_disk_based_diagnostics());
5677 });
5678 }
5679
5680 #[gpui::test]
5681 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5682 cx.foreground().forbid_parking();
5683
5684 let mut language = Language::new(
5685 LanguageConfig {
5686 name: "Rust".into(),
5687 path_suffixes: vec!["rs".to_string()],
5688 ..Default::default()
5689 },
5690 Some(tree_sitter_rust::language()),
5691 );
5692 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5693 disk_based_diagnostics_sources: &["disk"],
5694 ..Default::default()
5695 });
5696
5697 let text = "
5698 fn a() { A }
5699 fn b() { BB }
5700 fn c() { CCC }
5701 "
5702 .unindent();
5703
5704 let fs = FakeFs::new(cx.background());
5705 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5706
5707 let project = Project::test(fs, cx);
5708 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5709
5710 let worktree_id = project
5711 .update(cx, |project, cx| {
5712 project.find_or_create_local_worktree("/dir", true, cx)
5713 })
5714 .await
5715 .unwrap()
5716 .0
5717 .read_with(cx, |tree, _| tree.id());
5718
5719 let buffer = project
5720 .update(cx, |project, cx| {
5721 project.open_buffer((worktree_id, "a.rs"), cx)
5722 })
5723 .await
5724 .unwrap();
5725
5726 let mut fake_server = fake_servers.next().await.unwrap();
5727 let open_notification = fake_server
5728 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5729 .await;
5730
5731 // Edit the buffer, moving the content down
5732 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5733 let change_notification_1 = fake_server
5734 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5735 .await;
5736 assert!(
5737 change_notification_1.text_document.version > open_notification.text_document.version
5738 );
5739
5740 // Report some diagnostics for the initial version of the buffer
5741 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5742 lsp::PublishDiagnosticsParams {
5743 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5744 version: Some(open_notification.text_document.version),
5745 diagnostics: vec![
5746 lsp::Diagnostic {
5747 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5748 severity: Some(DiagnosticSeverity::ERROR),
5749 message: "undefined variable 'A'".to_string(),
5750 source: Some("disk".to_string()),
5751 ..Default::default()
5752 },
5753 lsp::Diagnostic {
5754 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5755 severity: Some(DiagnosticSeverity::ERROR),
5756 message: "undefined variable 'BB'".to_string(),
5757 source: Some("disk".to_string()),
5758 ..Default::default()
5759 },
5760 lsp::Diagnostic {
5761 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5762 severity: Some(DiagnosticSeverity::ERROR),
5763 source: Some("disk".to_string()),
5764 message: "undefined variable 'CCC'".to_string(),
5765 ..Default::default()
5766 },
5767 ],
5768 },
5769 );
5770
5771 // The diagnostics have moved down since they were created.
5772 buffer.next_notification(cx).await;
5773 buffer.read_with(cx, |buffer, _| {
5774 assert_eq!(
5775 buffer
5776 .snapshot()
5777 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5778 .collect::<Vec<_>>(),
5779 &[
5780 DiagnosticEntry {
5781 range: Point::new(3, 9)..Point::new(3, 11),
5782 diagnostic: Diagnostic {
5783 severity: DiagnosticSeverity::ERROR,
5784 message: "undefined variable 'BB'".to_string(),
5785 is_disk_based: true,
5786 group_id: 1,
5787 is_primary: true,
5788 ..Default::default()
5789 },
5790 },
5791 DiagnosticEntry {
5792 range: Point::new(4, 9)..Point::new(4, 12),
5793 diagnostic: Diagnostic {
5794 severity: DiagnosticSeverity::ERROR,
5795 message: "undefined variable 'CCC'".to_string(),
5796 is_disk_based: true,
5797 group_id: 2,
5798 is_primary: true,
5799 ..Default::default()
5800 }
5801 }
5802 ]
5803 );
5804 assert_eq!(
5805 chunks_with_diagnostics(buffer, 0..buffer.len()),
5806 [
5807 ("\n\nfn a() { ".to_string(), None),
5808 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5809 (" }\nfn b() { ".to_string(), None),
5810 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5811 (" }\nfn c() { ".to_string(), None),
5812 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5813 (" }\n".to_string(), None),
5814 ]
5815 );
5816 assert_eq!(
5817 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5818 [
5819 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5820 (" }\nfn c() { ".to_string(), None),
5821 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5822 ]
5823 );
5824 });
5825
5826 // Ensure overlapping diagnostics are highlighted correctly.
5827 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5828 lsp::PublishDiagnosticsParams {
5829 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5830 version: Some(open_notification.text_document.version),
5831 diagnostics: vec![
5832 lsp::Diagnostic {
5833 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5834 severity: Some(DiagnosticSeverity::ERROR),
5835 message: "undefined variable 'A'".to_string(),
5836 source: Some("disk".to_string()),
5837 ..Default::default()
5838 },
5839 lsp::Diagnostic {
5840 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5841 severity: Some(DiagnosticSeverity::WARNING),
5842 message: "unreachable statement".to_string(),
5843 source: Some("disk".to_string()),
5844 ..Default::default()
5845 },
5846 ],
5847 },
5848 );
5849
5850 buffer.next_notification(cx).await;
5851 buffer.read_with(cx, |buffer, _| {
5852 assert_eq!(
5853 buffer
5854 .snapshot()
5855 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5856 .collect::<Vec<_>>(),
5857 &[
5858 DiagnosticEntry {
5859 range: Point::new(2, 9)..Point::new(2, 12),
5860 diagnostic: Diagnostic {
5861 severity: DiagnosticSeverity::WARNING,
5862 message: "unreachable statement".to_string(),
5863 is_disk_based: true,
5864 group_id: 1,
5865 is_primary: true,
5866 ..Default::default()
5867 }
5868 },
5869 DiagnosticEntry {
5870 range: Point::new(2, 9)..Point::new(2, 10),
5871 diagnostic: Diagnostic {
5872 severity: DiagnosticSeverity::ERROR,
5873 message: "undefined variable 'A'".to_string(),
5874 is_disk_based: true,
5875 group_id: 0,
5876 is_primary: true,
5877 ..Default::default()
5878 },
5879 }
5880 ]
5881 );
5882 assert_eq!(
5883 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5884 [
5885 ("fn a() { ".to_string(), None),
5886 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5887 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5888 ("\n".to_string(), None),
5889 ]
5890 );
5891 assert_eq!(
5892 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5893 [
5894 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5895 ("\n".to_string(), None),
5896 ]
5897 );
5898 });
5899
5900 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5901 // changes since the last save.
5902 buffer.update(cx, |buffer, cx| {
5903 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5904 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5905 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5906 });
5907 let change_notification_2 = fake_server
5908 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5909 .await;
5910 assert!(
5911 change_notification_2.text_document.version
5912 > change_notification_1.text_document.version
5913 );
5914
5915 // Handle out-of-order diagnostics
5916 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5917 lsp::PublishDiagnosticsParams {
5918 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5919 version: Some(change_notification_2.text_document.version),
5920 diagnostics: vec![
5921 lsp::Diagnostic {
5922 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5923 severity: Some(DiagnosticSeverity::ERROR),
5924 message: "undefined variable 'BB'".to_string(),
5925 source: Some("disk".to_string()),
5926 ..Default::default()
5927 },
5928 lsp::Diagnostic {
5929 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5930 severity: Some(DiagnosticSeverity::WARNING),
5931 message: "undefined variable 'A'".to_string(),
5932 source: Some("disk".to_string()),
5933 ..Default::default()
5934 },
5935 ],
5936 },
5937 );
5938
5939 buffer.next_notification(cx).await;
5940 buffer.read_with(cx, |buffer, _| {
5941 assert_eq!(
5942 buffer
5943 .snapshot()
5944 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5945 .collect::<Vec<_>>(),
5946 &[
5947 DiagnosticEntry {
5948 range: Point::new(2, 21)..Point::new(2, 22),
5949 diagnostic: Diagnostic {
5950 severity: DiagnosticSeverity::WARNING,
5951 message: "undefined variable 'A'".to_string(),
5952 is_disk_based: true,
5953 group_id: 1,
5954 is_primary: true,
5955 ..Default::default()
5956 }
5957 },
5958 DiagnosticEntry {
5959 range: Point::new(3, 9)..Point::new(3, 14),
5960 diagnostic: Diagnostic {
5961 severity: DiagnosticSeverity::ERROR,
5962 message: "undefined variable 'BB'".to_string(),
5963 is_disk_based: true,
5964 group_id: 0,
5965 is_primary: true,
5966 ..Default::default()
5967 },
5968 }
5969 ]
5970 );
5971 });
5972 }
5973
5974 #[gpui::test]
5975 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5976 cx.foreground().forbid_parking();
5977
5978 let text = concat!(
5979 "let one = ;\n", //
5980 "let two = \n",
5981 "let three = 3;\n",
5982 );
5983
5984 let fs = FakeFs::new(cx.background());
5985 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5986
5987 let project = Project::test(fs, cx);
5988 let worktree_id = project
5989 .update(cx, |project, cx| {
5990 project.find_or_create_local_worktree("/dir", true, cx)
5991 })
5992 .await
5993 .unwrap()
5994 .0
5995 .read_with(cx, |tree, _| tree.id());
5996
5997 let buffer = project
5998 .update(cx, |project, cx| {
5999 project.open_buffer((worktree_id, "a.rs"), cx)
6000 })
6001 .await
6002 .unwrap();
6003
6004 project.update(cx, |project, cx| {
6005 project
6006 .update_buffer_diagnostics(
6007 &buffer,
6008 vec![
6009 DiagnosticEntry {
6010 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6011 diagnostic: Diagnostic {
6012 severity: DiagnosticSeverity::ERROR,
6013 message: "syntax error 1".to_string(),
6014 ..Default::default()
6015 },
6016 },
6017 DiagnosticEntry {
6018 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6019 diagnostic: Diagnostic {
6020 severity: DiagnosticSeverity::ERROR,
6021 message: "syntax error 2".to_string(),
6022 ..Default::default()
6023 },
6024 },
6025 ],
6026 None,
6027 cx,
6028 )
6029 .unwrap();
6030 });
6031
6032 // An empty range is extended forward to include the following character.
6033 // At the end of a line, an empty range is extended backward to include
6034 // the preceding character.
6035 buffer.read_with(cx, |buffer, _| {
6036 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6037 assert_eq!(
6038 chunks
6039 .iter()
6040 .map(|(s, d)| (s.as_str(), *d))
6041 .collect::<Vec<_>>(),
6042 &[
6043 ("let one = ", None),
6044 (";", Some(DiagnosticSeverity::ERROR)),
6045 ("\nlet two =", None),
6046 (" ", Some(DiagnosticSeverity::ERROR)),
6047 ("\nlet three = 3;\n", None)
6048 ]
6049 );
6050 });
6051 }
6052
6053 #[gpui::test]
6054 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6055 cx.foreground().forbid_parking();
6056
6057 let mut language = Language::new(
6058 LanguageConfig {
6059 name: "Rust".into(),
6060 path_suffixes: vec!["rs".to_string()],
6061 ..Default::default()
6062 },
6063 Some(tree_sitter_rust::language()),
6064 );
6065 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6066
6067 let text = "
6068 fn a() {
6069 f1();
6070 }
6071 fn b() {
6072 f2();
6073 }
6074 fn c() {
6075 f3();
6076 }
6077 "
6078 .unindent();
6079
6080 let fs = FakeFs::new(cx.background());
6081 fs.insert_tree(
6082 "/dir",
6083 json!({
6084 "a.rs": text.clone(),
6085 }),
6086 )
6087 .await;
6088
6089 let project = Project::test(fs, cx);
6090 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6091
6092 let worktree_id = project
6093 .update(cx, |project, cx| {
6094 project.find_or_create_local_worktree("/dir", true, cx)
6095 })
6096 .await
6097 .unwrap()
6098 .0
6099 .read_with(cx, |tree, _| tree.id());
6100
6101 let buffer = project
6102 .update(cx, |project, cx| {
6103 project.open_buffer((worktree_id, "a.rs"), cx)
6104 })
6105 .await
6106 .unwrap();
6107
6108 let mut fake_server = fake_servers.next().await.unwrap();
6109 let lsp_document_version = fake_server
6110 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6111 .await
6112 .text_document
6113 .version;
6114
6115 // Simulate editing the buffer after the language server computes some edits.
6116 buffer.update(cx, |buffer, cx| {
6117 buffer.edit(
6118 [Point::new(0, 0)..Point::new(0, 0)],
6119 "// above first function\n",
6120 cx,
6121 );
6122 buffer.edit(
6123 [Point::new(2, 0)..Point::new(2, 0)],
6124 " // inside first function\n",
6125 cx,
6126 );
6127 buffer.edit(
6128 [Point::new(6, 4)..Point::new(6, 4)],
6129 "// inside second function ",
6130 cx,
6131 );
6132
6133 assert_eq!(
6134 buffer.text(),
6135 "
6136 // above first function
6137 fn a() {
6138 // inside first function
6139 f1();
6140 }
6141 fn b() {
6142 // inside second function f2();
6143 }
6144 fn c() {
6145 f3();
6146 }
6147 "
6148 .unindent()
6149 );
6150 });
6151
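        // Interpret the LSP edits against the older document version; the returned
        // ranges should be mapped forward through the buffer edits made above.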
6152 let edits = project
6153 .update(cx, |project, cx| {
6154 project.edits_from_lsp(
6155 &buffer,
6156 vec![
6157 // replace body of first function
6158 lsp::TextEdit {
6159 range: lsp::Range::new(
6160 lsp::Position::new(0, 0),
6161 lsp::Position::new(3, 0),
6162 ),
6163 new_text: "
6164 fn a() {
6165 f10();
6166 }
6167 "
6168 .unindent(),
6169 },
6170 // edit inside second function
6171 lsp::TextEdit {
6172 range: lsp::Range::new(
6173 lsp::Position::new(4, 6),
6174 lsp::Position::new(4, 6),
6175 ),
6176 new_text: "00".into(),
6177 },
6178 // edit inside third function via two distinct edits
6179 lsp::TextEdit {
6180 range: lsp::Range::new(
6181 lsp::Position::new(7, 5),
6182 lsp::Position::new(7, 5),
6183 ),
6184 new_text: "4000".into(),
6185 },
6186 lsp::TextEdit {
6187 range: lsp::Range::new(
6188 lsp::Position::new(7, 5),
6189 lsp::Position::new(7, 6),
6190 ),
6191 new_text: "".into(),
6192 },
6193 ],
6194 Some(lsp_document_version),
6195 cx,
6196 )
6197 })
6198 .await
6199 .unwrap();
6200
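        // Apply the translated edits and confirm they land in the intended locations
        // despite the intervening buffer edits.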
6201 buffer.update(cx, |buffer, cx| {
6202 for (range, new_text) in edits {
6203 buffer.edit([range], new_text, cx);
6204 }
6205 assert_eq!(
6206 buffer.text(),
6207 "
6208 // above first function
6209 fn a() {
6210 // inside first function
6211 f10();
6212 }
6213 fn b() {
6214 // inside second function f200();
6215 }
6216 fn c() {
6217 f4000();
6218 }
6219 "
6220 .unindent()
6221 );
6222 });
6223 }
6224
6225 #[gpui::test]
6226 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6227 cx.foreground().forbid_parking();
6228
6229 let text = "
6230 use a::b;
6231 use a::c;
6232
6233 fn f() {
6234 b();
6235 c();
6236 }
6237 "
6238 .unindent();
6239
6240 let fs = FakeFs::new(cx.background());
6241 fs.insert_tree(
6242 "/dir",
6243 json!({
6244 "a.rs": text.clone(),
6245 }),
6246 )
6247 .await;
6248
6249 let project = Project::test(fs, cx);
6250 let worktree_id = project
6251 .update(cx, |project, cx| {
6252 project.find_or_create_local_worktree("/dir", true, cx)
6253 })
6254 .await
6255 .unwrap()
6256 .0
6257 .read_with(cx, |tree, _| tree.id());
6258
6259 let buffer = project
6260 .update(cx, |project, cx| {
6261 project.open_buffer((worktree_id, "a.rs"), cx)
6262 })
6263 .await
6264 .unwrap();
6265
6266 // Simulate the language server sending us a small edit in the form of a very large diff.
6267 // Rust-analyzer does this when performing a merge-imports code action.
6268 let edits = project
6269 .update(cx, |project, cx| {
6270 project.edits_from_lsp(
6271 &buffer,
6272 [
6273 // Replace the first use statement without editing the semicolon.
6274 lsp::TextEdit {
6275 range: lsp::Range::new(
6276 lsp::Position::new(0, 4),
6277 lsp::Position::new(0, 8),
6278 ),
6279 new_text: "a::{b, c}".into(),
6280 },
6281 // Reinsert the remainder of the file between the semicolon and the final
6282 // newline of the file.
6283 lsp::TextEdit {
6284 range: lsp::Range::new(
6285 lsp::Position::new(0, 9),
6286 lsp::Position::new(0, 9),
6287 ),
6288 new_text: "\n\n".into(),
6289 },
6290 lsp::TextEdit {
6291 range: lsp::Range::new(
6292 lsp::Position::new(0, 9),
6293 lsp::Position::new(0, 9),
6294 ),
6295 new_text: "
6296 fn f() {
6297 b();
6298 c();
6299 }"
6300 .unindent(),
6301 },
6302 // Delete everything after the first newline of the file.
6303 lsp::TextEdit {
6304 range: lsp::Range::new(
6305 lsp::Position::new(1, 0),
6306 lsp::Position::new(7, 0),
6307 ),
6308 new_text: "".into(),
6309 },
6310 ],
6311 None,
6312 cx,
6313 )
6314 })
6315 .await
6316 .unwrap();
6317
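        // The four LSP edits above should collapse into a minimal pair of buffer edits.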
6318 buffer.update(cx, |buffer, cx| {
6319 let edits = edits
6320 .into_iter()
6321 .map(|(range, text)| {
6322 (
6323 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6324 text,
6325 )
6326 })
6327 .collect::<Vec<_>>();
6328
6329 assert_eq!(
6330 edits,
6331 [
6332 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6333 (Point::new(1, 0)..Point::new(2, 0), "".into())
6334 ]
6335 );
6336
6337 for (range, new_text) in edits {
6338 buffer.edit([range], new_text, cx);
6339 }
6340 assert_eq!(
6341 buffer.text(),
6342 "
6343 use a::{b, c};
6344
6345 fn f() {
6346 b();
6347 c();
6348 }
6349 "
6350 .unindent()
6351 );
6352 });
6353 }
6354
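    // Collects the chunks of `buffer` within `range`, merging adjacent chunks that
    // carry the same diagnostic severity.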
6355 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6356 buffer: &Buffer,
6357 range: Range<T>,
6358 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6359 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6360 for chunk in buffer.snapshot().chunks(range, true) {
6361 if chunks.last().map_or(false, |prev_chunk| {
6362 prev_chunk.1 == chunk.diagnostic_severity
6363 }) {
6364 chunks.last_mut().unwrap().0.push_str(chunk.text);
6365 } else {
6366 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6367 }
6368 }
6369 chunks
6370 }
6371
6372 #[gpui::test]
6373 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6374 let dir = temp_tree(json!({
6375 "root": {
6376 "dir1": {},
6377 "dir2": {
6378 "dir3": {}
6379 }
6380 }
6381 }));
6382
6383 let project = Project::test(Arc::new(RealFs), cx);
6384 let (tree, _) = project
6385 .update(cx, |project, cx| {
6386 project.find_or_create_local_worktree(&dir.path(), true, cx)
6387 })
6388 .await
6389 .unwrap();
6390
6391 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6392 .await;
6393
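        // Fuzzy path matching should return no results, since the worktree contains
        // only directories and no files.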
6394 let cancel_flag = Default::default();
6395 let results = project
6396 .read_with(cx, |project, cx| {
6397 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6398 })
6399 .await;
6400
6401 assert!(results.is_empty());
6402 }
6403
6404 #[gpui::test]
6405 async fn test_definition(cx: &mut gpui::TestAppContext) {
6406 let mut language = Language::new(
6407 LanguageConfig {
6408 name: "Rust".into(),
6409 path_suffixes: vec!["rs".to_string()],
6410 ..Default::default()
6411 },
6412 Some(tree_sitter_rust::language()),
6413 );
6414 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6415
6416 let fs = FakeFs::new(cx.background());
6417 fs.insert_tree(
6418 "/dir",
6419 json!({
6420 "a.rs": "const fn a() { A }",
6421 "b.rs": "const y: i32 = crate::a()",
6422 }),
6423 )
6424 .await;
6425
6426 let project = Project::test(fs, cx);
6427 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6428
6429 let (tree, _) = project
6430 .update(cx, |project, cx| {
6431 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6432 })
6433 .await
6434 .unwrap();
6435 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6436 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6437 .await;
6438
6439 let buffer = project
6440 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6441 .await
6442 .unwrap();
6443
6444 let fake_server = fake_servers.next().await.unwrap();
6445 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6446 let params = params.text_document_position_params;
6447 assert_eq!(
6448 params.text_document.uri.to_file_path().unwrap(),
6449 Path::new("/dir/b.rs"),
6450 );
6451 assert_eq!(params.position, lsp::Position::new(0, 22));
6452
6453 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6454 lsp::Location::new(
6455 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6456 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6457 ),
6458 )))
6459 });
6460
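        // Request the definition at offset 22, which falls on the call to `a` in b.rs.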
6461 let mut definitions = project
6462 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6463 .await
6464 .unwrap();
6465
6466 assert_eq!(definitions.len(), 1);
6467 let definition = definitions.pop().unwrap();
6468 cx.update(|cx| {
6469 let target_buffer = definition.buffer.read(cx);
6470 assert_eq!(
6471 target_buffer
6472 .file()
6473 .unwrap()
6474 .as_local()
6475 .unwrap()
6476 .abs_path(cx),
6477 Path::new("/dir/a.rs"),
6478 );
6479 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6480 assert_eq!(
6481 list_worktrees(&project, cx),
6482 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6483 );
6484
6485 drop(definition);
6486 });
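        // Dropping the definition releases the buffer for a.rs, so its invisible
        // worktree is removed from the project.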
6487 cx.read(|cx| {
6488 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6489 });
6490
6491 fn list_worktrees<'a>(
6492 project: &'a ModelHandle<Project>,
6493 cx: &'a AppContext,
6494 ) -> Vec<(&'a Path, bool)> {
6495 project
6496 .read(cx)
6497 .worktrees(cx)
6498 .map(|worktree| {
6499 let worktree = worktree.read(cx);
6500 (
6501 worktree.as_local().unwrap().abs_path().as_ref(),
6502 worktree.is_visible(),
6503 )
6504 })
6505 .collect::<Vec<_>>()
6506 }
6507 }
6508
6509 #[gpui::test(iterations = 10)]
6510 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6511 let mut language = Language::new(
6512 LanguageConfig {
6513 name: "TypeScript".into(),
6514 path_suffixes: vec!["ts".to_string()],
6515 ..Default::default()
6516 },
6517 None,
6518 );
6519 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6520
6521 let fs = FakeFs::new(cx.background());
6522 fs.insert_tree(
6523 "/dir",
6524 json!({
6525 "a.ts": "a",
6526 }),
6527 )
6528 .await;
6529
6530 let project = Project::test(fs, cx);
6531 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6532
6533 let (tree, _) = project
6534 .update(cx, |project, cx| {
6535 project.find_or_create_local_worktree("/dir", true, cx)
6536 })
6537 .await
6538 .unwrap();
6539 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6540 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6541 .await;
6542
6543 let buffer = project
6544 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6545 .await
6546 .unwrap();
6547
6548 let fake_server = fake_language_servers.next().await.unwrap();
6549
        // The language server returns code actions that contain commands, not edits.
6551 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6552 fake_server
6553 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6554 Ok(Some(vec![
6555 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6556 title: "The code action".into(),
6557 command: Some(lsp::Command {
6558 title: "The command".into(),
6559 command: "_the/command".into(),
6560 arguments: Some(vec![json!("the-argument")]),
6561 }),
6562 ..Default::default()
6563 }),
6564 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6565 title: "two".into(),
6566 ..Default::default()
6567 }),
6568 ]))
6569 })
6570 .next()
6571 .await;
6572
6573 let action = actions.await.unwrap()[0].clone();
6574 let apply = project.update(cx, |project, cx| {
6575 project.apply_code_action(buffer.clone(), action, true, cx)
6576 });
6577
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
6580 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6581 |action, _| async move { Ok(action) },
6582 );
6583
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6586 fake_server
6587 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6588 let fake = fake_server.clone();
6589 move |params, _| {
6590 assert_eq!(params.command, "_the/command");
6591 let fake = fake.clone();
6592 async move {
6593 fake.server
6594 .request::<lsp::request::ApplyWorkspaceEdit>(
6595 lsp::ApplyWorkspaceEditParams {
6596 label: None,
6597 edit: lsp::WorkspaceEdit {
6598 changes: Some(
6599 [(
6600 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6601 vec![lsp::TextEdit {
6602 range: lsp::Range::new(
6603 lsp::Position::new(0, 0),
6604 lsp::Position::new(0, 0),
6605 ),
6606 new_text: "X".into(),
6607 }],
6608 )]
6609 .into_iter()
6610 .collect(),
6611 ),
6612 ..Default::default()
6613 },
6614 },
6615 )
6616 .await
6617 .unwrap();
6618 Ok(Some(json!(null)))
6619 }
6620 }
6621 })
6622 .next()
6623 .await;
6624
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6627 let transaction = apply.await.unwrap();
6628 assert!(transaction.0.contains_key(&buffer));
6629 buffer.update(cx, |buffer, cx| {
6630 assert_eq!(buffer.text(), "Xa");
6631 buffer.undo(cx);
6632 assert_eq!(buffer.text(), "a");
6633 });
6634 }
6635
6636 #[gpui::test]
6637 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6638 let fs = FakeFs::new(cx.background());
6639 fs.insert_tree(
6640 "/dir",
6641 json!({
6642 "file1": "the old contents",
6643 }),
6644 )
6645 .await;
6646
6647 let project = Project::test(fs.clone(), cx);
6648 let worktree_id = project
6649 .update(cx, |p, cx| {
6650 p.find_or_create_local_worktree("/dir", true, cx)
6651 })
6652 .await
6653 .unwrap()
6654 .0
6655 .read_with(cx, |tree, _| tree.id());
6656
6657 let buffer = project
6658 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6659 .await
6660 .unwrap();
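        // Replace the file's contents with a large body of text and save it back to disk.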
6661 buffer
6662 .update(cx, |buffer, cx| {
6663 assert_eq!(buffer.text(), "the old contents");
6664 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6665 buffer.save(cx)
6666 })
6667 .await
6668 .unwrap();
6669
6670 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6671 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6672 }
6673
6674 #[gpui::test]
6675 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6676 let fs = FakeFs::new(cx.background());
6677 fs.insert_tree(
6678 "/dir",
6679 json!({
6680 "file1": "the old contents",
6681 }),
6682 )
6683 .await;
6684
6685 let project = Project::test(fs.clone(), cx);
6686 let worktree_id = project
6687 .update(cx, |p, cx| {
6688 p.find_or_create_local_worktree("/dir/file1", true, cx)
6689 })
6690 .await
6691 .unwrap()
6692 .0
6693 .read_with(cx, |tree, _| tree.id());
6694
6695 let buffer = project
6696 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6697 .await
6698 .unwrap();
6699 buffer
6700 .update(cx, |buffer, cx| {
6701 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6702 buffer.save(cx)
6703 })
6704 .await
6705 .unwrap();
6706
6707 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6708 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6709 }
6710
6711 #[gpui::test]
6712 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6713 let fs = FakeFs::new(cx.background());
6714 fs.insert_tree("/dir", json!({})).await;
6715
6716 let project = Project::test(fs.clone(), cx);
6717 let (worktree, _) = project
6718 .update(cx, |project, cx| {
6719 project.find_or_create_local_worktree("/dir", true, cx)
6720 })
6721 .await
6722 .unwrap();
6723 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6724
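        // Create an untitled buffer, edit it, and then save it to a new path within
        // the worktree.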
6725 let buffer = project.update(cx, |project, cx| {
6726 project.create_buffer("", None, cx).unwrap()
6727 });
6728 buffer.update(cx, |buffer, cx| {
6729 buffer.edit([0..0], "abc", cx);
6730 assert!(buffer.is_dirty());
6731 assert!(!buffer.has_conflict());
6732 });
6733 project
6734 .update(cx, |project, cx| {
6735 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6736 })
6737 .await
6738 .unwrap();
6739 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6740 buffer.read_with(cx, |buffer, cx| {
6741 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6742 assert!(!buffer.is_dirty());
6743 assert!(!buffer.has_conflict());
6744 });
6745
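        // Opening the newly saved path should return the same buffer, not a new copy.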
6746 let opened_buffer = project
6747 .update(cx, |project, cx| {
6748 project.open_buffer((worktree_id, "file1"), cx)
6749 })
6750 .await
6751 .unwrap();
6752 assert_eq!(opened_buffer, buffer);
6753 }
6754
6755 #[gpui::test(retries = 5)]
6756 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6757 let dir = temp_tree(json!({
6758 "a": {
6759 "file1": "",
6760 "file2": "",
6761 "file3": "",
6762 },
6763 "b": {
6764 "c": {
6765 "file4": "",
6766 "file5": "",
6767 }
6768 }
6769 }));
6770
6771 let project = Project::test(Arc::new(RealFs), cx);
6772 let rpc = project.read_with(cx, |p, _| p.client.clone());
6773
6774 let (tree, _) = project
6775 .update(cx, |p, cx| {
6776 p.find_or_create_local_worktree(dir.path(), true, cx)
6777 })
6778 .await
6779 .unwrap();
6780 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6781
6782 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6783 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6784 async move { buffer.await.unwrap() }
6785 };
6786 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6787 tree.read_with(cx, |tree, _| {
6788 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
6790 .id
6791 })
6792 };
6793
6794 let buffer2 = buffer_for_path("a/file2", cx).await;
6795 let buffer3 = buffer_for_path("a/file3", cx).await;
6796 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6797 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6798
6799 let file2_id = id_for_path("a/file2", &cx);
6800 let file3_id = id_for_path("a/file3", &cx);
6801 let file4_id = id_for_path("b/c/file4", &cx);
6802
6803 // Wait for the initial scan.
6804 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6805 .await;
6806
6807 // Create a remote copy of this worktree.
6808 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6809 let (remote, load_task) = cx.update(|cx| {
6810 Worktree::remote(
6811 1,
6812 1,
6813 initial_snapshot.to_proto(&Default::default(), true),
6814 rpc.clone(),
6815 cx,
6816 )
6817 });
6818 load_task.await;
6819
6820 cx.read(|cx| {
6821 assert!(!buffer2.read(cx).is_dirty());
6822 assert!(!buffer3.read(cx).is_dirty());
6823 assert!(!buffer4.read(cx).is_dirty());
6824 assert!(!buffer5.read(cx).is_dirty());
6825 });
6826
6827 // Rename and delete files and directories.
6828 tree.flush_fs_events(&cx).await;
6829 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6830 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6831 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6832 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6833 tree.flush_fs_events(&cx).await;
6834
6835 let expected_paths = vec![
6836 "a",
6837 "a/file1",
6838 "a/file2.new",
6839 "b",
6840 "d",
6841 "d/file3",
6842 "d/file4",
6843 ];
6844
6845 cx.read(|app| {
6846 assert_eq!(
6847 tree.read(app)
6848 .paths()
6849 .map(|p| p.to_str().unwrap())
6850 .collect::<Vec<_>>(),
6851 expected_paths
6852 );
6853
6854 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6855 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6856 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6857
6858 assert_eq!(
6859 buffer2.read(app).file().unwrap().path().as_ref(),
6860 Path::new("a/file2.new")
6861 );
6862 assert_eq!(
6863 buffer3.read(app).file().unwrap().path().as_ref(),
6864 Path::new("d/file3")
6865 );
6866 assert_eq!(
6867 buffer4.read(app).file().unwrap().path().as_ref(),
6868 Path::new("d/file4")
6869 );
6870 assert_eq!(
6871 buffer5.read(app).file().unwrap().path().as_ref(),
6872 Path::new("b/c/file5")
6873 );
6874
6875 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6876 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6877 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6878 assert!(buffer5.read(app).file().unwrap().is_deleted());
6879 });
6880
6881 // Update the remote worktree. Check that it becomes consistent with the
6882 // local worktree.
6883 remote.update(cx, |remote, cx| {
6884 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6885 &initial_snapshot,
6886 1,
6887 1,
6888 true,
6889 );
6890 remote
6891 .as_remote_mut()
6892 .unwrap()
6893 .snapshot
6894 .apply_remote_update(update_message)
6895 .unwrap();
6896
6897 assert_eq!(
6898 remote
6899 .paths()
6900 .map(|p| p.to_str().unwrap())
6901 .collect::<Vec<_>>(),
6902 expected_paths
6903 );
6904 });
6905 }
6906
6907 #[gpui::test]
6908 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6909 let fs = FakeFs::new(cx.background());
6910 fs.insert_tree(
6911 "/the-dir",
6912 json!({
6913 "a.txt": "a-contents",
6914 "b.txt": "b-contents",
6915 }),
6916 )
6917 .await;
6918
6919 let project = Project::test(fs.clone(), cx);
6920 let worktree_id = project
6921 .update(cx, |p, cx| {
6922 p.find_or_create_local_worktree("/the-dir", true, cx)
6923 })
6924 .await
6925 .unwrap()
6926 .0
6927 .read_with(cx, |tree, _| tree.id());
6928
6929 // Spawn multiple tasks to open paths, repeating some paths.
6930 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6931 (
6932 p.open_buffer((worktree_id, "a.txt"), cx),
6933 p.open_buffer((worktree_id, "b.txt"), cx),
6934 p.open_buffer((worktree_id, "a.txt"), cx),
6935 )
6936 });
6937
6938 let buffer_a_1 = buffer_a_1.await.unwrap();
6939 let buffer_a_2 = buffer_a_2.await.unwrap();
6940 let buffer_b = buffer_b.await.unwrap();
6941 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6942 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6943
6944 // There is only one buffer per path.
6945 let buffer_a_id = buffer_a_1.id();
6946 assert_eq!(buffer_a_2.id(), buffer_a_id);
6947
6948 // Open the same path again while it is still open.
6949 drop(buffer_a_1);
6950 let buffer_a_3 = project
6951 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6952 .await
6953 .unwrap();
6954
6955 // There's still only one buffer per path.
6956 assert_eq!(buffer_a_3.id(), buffer_a_id);
6957 }
6958
6959 #[gpui::test]
6960 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6961 use std::fs;
6962
6963 let dir = temp_tree(json!({
6964 "file1": "abc",
6965 "file2": "def",
6966 "file3": "ghi",
6967 }));
6968
6969 let project = Project::test(Arc::new(RealFs), cx);
6970 let (worktree, _) = project
6971 .update(cx, |p, cx| {
6972 p.find_or_create_local_worktree(dir.path(), true, cx)
6973 })
6974 .await
6975 .unwrap();
6976 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6977
6978 worktree.flush_fs_events(&cx).await;
6979 worktree
6980 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6981 .await;
6982
6983 let buffer1 = project
6984 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6985 .await
6986 .unwrap();
6987 let events = Rc::new(RefCell::new(Vec::new()));
6988
6989 // initially, the buffer isn't dirty.
6990 buffer1.update(cx, |buffer, cx| {
6991 cx.subscribe(&buffer1, {
6992 let events = events.clone();
6993 move |_, _, event, _| match event {
6994 BufferEvent::Operation(_) => {}
6995 _ => events.borrow_mut().push(event.clone()),
6996 }
6997 })
6998 .detach();
6999
7000 assert!(!buffer.is_dirty());
7001 assert!(events.borrow().is_empty());
7002
7003 buffer.edit(vec![1..2], "", cx);
7004 });
7005
7006 // after the first edit, the buffer is dirty, and emits a dirtied event.
7007 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
7009 assert!(buffer.is_dirty());
7010 assert_eq!(
7011 *events.borrow(),
7012 &[language::Event::Edited, language::Event::Dirtied]
7013 );
7014 events.borrow_mut().clear();
7015 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7016 });
7017
7018 // after saving, the buffer is not dirty, and emits a saved event.
7019 buffer1.update(cx, |buffer, cx| {
7020 assert!(!buffer.is_dirty());
7021 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7022 events.borrow_mut().clear();
7023
7024 buffer.edit(vec![1..1], "B", cx);
7025 buffer.edit(vec![2..2], "D", cx);
7026 });
7027
        // after editing again, the buffer is dirty, and emits another dirtied event.
7029 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
7031 assert!(buffer.is_dirty());
7032 assert_eq!(
7033 *events.borrow(),
7034 &[
7035 language::Event::Edited,
7036 language::Event::Dirtied,
7037 language::Event::Edited,
7038 ],
7039 );
7040 events.borrow_mut().clear();
7041
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7044 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
7046 assert!(buffer.is_dirty());
7047 });
7048
7049 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7050
7051 // When a file is deleted, the buffer is considered dirty.
7052 let events = Rc::new(RefCell::new(Vec::new()));
7053 let buffer2 = project
7054 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
7055 .await
7056 .unwrap();
7057 buffer2.update(cx, |_, cx| {
7058 cx.subscribe(&buffer2, {
7059 let events = events.clone();
7060 move |_, _, event, _| events.borrow_mut().push(event.clone())
7061 })
7062 .detach();
7063 });
7064
7065 fs::remove_file(dir.path().join("file2")).unwrap();
7066 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7067 assert_eq!(
7068 *events.borrow(),
7069 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7070 );
7071
7072 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7073 let events = Rc::new(RefCell::new(Vec::new()));
7074 let buffer3 = project
7075 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
7076 .await
7077 .unwrap();
7078 buffer3.update(cx, |_, cx| {
7079 cx.subscribe(&buffer3, {
7080 let events = events.clone();
7081 move |_, _, event, _| events.borrow_mut().push(event.clone())
7082 })
7083 .detach();
7084 });
7085
7086 worktree.flush_fs_events(&cx).await;
7087 buffer3.update(cx, |buffer, cx| {
7088 buffer.edit(Some(0..0), "x", cx);
7089 });
7090 events.borrow_mut().clear();
7091 fs::remove_file(dir.path().join("file3")).unwrap();
7092 buffer3
7093 .condition(&cx, |_, _| !events.borrow().is_empty())
7094 .await;
7095 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7096 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7097 }
7098
7099 #[gpui::test]
7100 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7101 use std::fs;
7102
7103 let initial_contents = "aaa\nbbbbb\nc\n";
7104 let dir = temp_tree(json!({ "the-file": initial_contents }));
7105
7106 let project = Project::test(Arc::new(RealFs), cx);
7107 let (worktree, _) = project
7108 .update(cx, |p, cx| {
7109 p.find_or_create_local_worktree(dir.path(), true, cx)
7110 })
7111 .await
7112 .unwrap();
7113 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7114
7115 worktree
7116 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
7117 .await;
7118
7119 let abs_path = dir.path().join("the-file");
7120 let buffer = project
7121 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
7122 .await
7123 .unwrap();
7124
7125 // TODO
7126 // Add a cursor on each row.
7127 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
7128 // assert!(!buffer.is_dirty());
7129 // buffer.add_selection_set(
7130 // &(0..3)
7131 // .map(|row| Selection {
7132 // id: row as usize,
7133 // start: Point::new(row, 1),
7134 // end: Point::new(row, 1),
7135 // reversed: false,
7136 // goal: SelectionGoal::None,
7137 // })
7138 // .collect::<Vec<_>>(),
7139 // cx,
7140 // )
7141 // });
7142
7143 // Change the file on disk, adding two new lines of text, and removing
7144 // one line.
7145 buffer.read_with(cx, |buffer, _| {
7146 assert!(!buffer.is_dirty());
7147 assert!(!buffer.has_conflict());
7148 });
7149 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7150 fs::write(&abs_path, new_contents).unwrap();
7151
7152 // Because the buffer was not modified, it is reloaded from disk. Its
7153 // contents are edited according to the diff between the old and new
7154 // file contents.
7155 buffer
7156 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7157 .await;
7158
7159 buffer.update(cx, |buffer, _| {
7160 assert_eq!(buffer.text(), new_contents);
7161 assert!(!buffer.is_dirty());
7162 assert!(!buffer.has_conflict());
7163
7164 // TODO
7165 // let cursor_positions = buffer
7166 // .selection_set(selection_set_id)
7167 // .unwrap()
7168 // .selections::<Point>(&*buffer)
7169 // .map(|selection| {
7170 // assert_eq!(selection.start, selection.end);
7171 // selection.start
7172 // })
7173 // .collect::<Vec<_>>();
7174 // assert_eq!(
7175 // cursor_positions,
7176 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7177 // );
7178 });
7179
7180 // Modify the buffer
7181 buffer.update(cx, |buffer, cx| {
7182 buffer.edit(vec![0..0], " ", cx);
7183 assert!(buffer.is_dirty());
7184 assert!(!buffer.has_conflict());
7185 });
7186
7187 // Change the file on disk again, adding blank lines to the beginning.
7188 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7189
7190 // Because the buffer is modified, it doesn't reload from disk, but is
7191 // marked as having a conflict.
7192 buffer
7193 .condition(&cx, |buffer, _| buffer.has_conflict())
7194 .await;
7195 }
7196
7197 #[gpui::test]
7198 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7199 cx.foreground().forbid_parking();
7200
7201 let fs = FakeFs::new(cx.background());
7202 fs.insert_tree(
7203 "/the-dir",
7204 json!({
7205 "a.rs": "
7206 fn foo(mut v: Vec<usize>) {
7207 for x in &v {
7208 v.push(1);
7209 }
7210 }
7211 "
7212 .unindent(),
7213 }),
7214 )
7215 .await;
7216
7217 let project = Project::test(fs.clone(), cx);
7218 let (worktree, _) = project
7219 .update(cx, |p, cx| {
7220 p.find_or_create_local_worktree("/the-dir", true, cx)
7221 })
7222 .await
7223 .unwrap();
7224 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7225
7226 let buffer = project
7227 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7228 .await
7229 .unwrap();
7230
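        // Construct a diagnostics payload in which the hint entries reference their
        // primary diagnostics via `related_information`, so that they can be grouped.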
7231 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7232 let message = lsp::PublishDiagnosticsParams {
7233 uri: buffer_uri.clone(),
7234 diagnostics: vec![
7235 lsp::Diagnostic {
7236 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7237 severity: Some(DiagnosticSeverity::WARNING),
7238 message: "error 1".to_string(),
7239 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7240 location: lsp::Location {
7241 uri: buffer_uri.clone(),
7242 range: lsp::Range::new(
7243 lsp::Position::new(1, 8),
7244 lsp::Position::new(1, 9),
7245 ),
7246 },
7247 message: "error 1 hint 1".to_string(),
7248 }]),
7249 ..Default::default()
7250 },
7251 lsp::Diagnostic {
7252 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7253 severity: Some(DiagnosticSeverity::HINT),
7254 message: "error 1 hint 1".to_string(),
7255 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7256 location: lsp::Location {
7257 uri: buffer_uri.clone(),
7258 range: lsp::Range::new(
7259 lsp::Position::new(1, 8),
7260 lsp::Position::new(1, 9),
7261 ),
7262 },
7263 message: "original diagnostic".to_string(),
7264 }]),
7265 ..Default::default()
7266 },
7267 lsp::Diagnostic {
7268 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7269 severity: Some(DiagnosticSeverity::ERROR),
7270 message: "error 2".to_string(),
7271 related_information: Some(vec![
7272 lsp::DiagnosticRelatedInformation {
7273 location: lsp::Location {
7274 uri: buffer_uri.clone(),
7275 range: lsp::Range::new(
7276 lsp::Position::new(1, 13),
7277 lsp::Position::new(1, 15),
7278 ),
7279 },
7280 message: "error 2 hint 1".to_string(),
7281 },
7282 lsp::DiagnosticRelatedInformation {
7283 location: lsp::Location {
7284 uri: buffer_uri.clone(),
7285 range: lsp::Range::new(
7286 lsp::Position::new(1, 13),
7287 lsp::Position::new(1, 15),
7288 ),
7289 },
7290 message: "error 2 hint 2".to_string(),
7291 },
7292 ]),
7293 ..Default::default()
7294 },
7295 lsp::Diagnostic {
7296 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7297 severity: Some(DiagnosticSeverity::HINT),
7298 message: "error 2 hint 1".to_string(),
7299 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7300 location: lsp::Location {
7301 uri: buffer_uri.clone(),
7302 range: lsp::Range::new(
7303 lsp::Position::new(2, 8),
7304 lsp::Position::new(2, 17),
7305 ),
7306 },
7307 message: "original diagnostic".to_string(),
7308 }]),
7309 ..Default::default()
7310 },
7311 lsp::Diagnostic {
7312 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7313 severity: Some(DiagnosticSeverity::HINT),
7314 message: "error 2 hint 2".to_string(),
7315 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7316 location: lsp::Location {
7317 uri: buffer_uri.clone(),
7318 range: lsp::Range::new(
7319 lsp::Position::new(2, 8),
7320 lsp::Position::new(2, 17),
7321 ),
7322 },
7323 message: "original diagnostic".to_string(),
7324 }]),
7325 ..Default::default()
7326 },
7327 ],
7328 version: None,
7329 };
7330
7331 project
7332 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7333 .unwrap();
7334 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7335
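        // The diagnostics should be grouped: "error 1" and its hint share group 0,
        // while "error 2" and its hints share group 1.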
7336 assert_eq!(
7337 buffer
7338 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7339 .collect::<Vec<_>>(),
7340 &[
7341 DiagnosticEntry {
7342 range: Point::new(1, 8)..Point::new(1, 9),
7343 diagnostic: Diagnostic {
7344 severity: DiagnosticSeverity::WARNING,
7345 message: "error 1".to_string(),
7346 group_id: 0,
7347 is_primary: true,
7348 ..Default::default()
7349 }
7350 },
7351 DiagnosticEntry {
7352 range: Point::new(1, 8)..Point::new(1, 9),
7353 diagnostic: Diagnostic {
7354 severity: DiagnosticSeverity::HINT,
7355 message: "error 1 hint 1".to_string(),
7356 group_id: 0,
7357 is_primary: false,
7358 ..Default::default()
7359 }
7360 },
7361 DiagnosticEntry {
7362 range: Point::new(1, 13)..Point::new(1, 15),
7363 diagnostic: Diagnostic {
7364 severity: DiagnosticSeverity::HINT,
7365 message: "error 2 hint 1".to_string(),
7366 group_id: 1,
7367 is_primary: false,
7368 ..Default::default()
7369 }
7370 },
7371 DiagnosticEntry {
7372 range: Point::new(1, 13)..Point::new(1, 15),
7373 diagnostic: Diagnostic {
7374 severity: DiagnosticSeverity::HINT,
7375 message: "error 2 hint 2".to_string(),
7376 group_id: 1,
7377 is_primary: false,
7378 ..Default::default()
7379 }
7380 },
7381 DiagnosticEntry {
7382 range: Point::new(2, 8)..Point::new(2, 17),
7383 diagnostic: Diagnostic {
7384 severity: DiagnosticSeverity::ERROR,
7385 message: "error 2".to_string(),
7386 group_id: 1,
7387 is_primary: true,
7388 ..Default::default()
7389 }
7390 }
7391 ]
7392 );
7393
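        // Each group can also be queried individually, yielding its primary diagnostic
        // along with the associated hints.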
7394 assert_eq!(
7395 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7396 &[
7397 DiagnosticEntry {
7398 range: Point::new(1, 8)..Point::new(1, 9),
7399 diagnostic: Diagnostic {
7400 severity: DiagnosticSeverity::WARNING,
7401 message: "error 1".to_string(),
7402 group_id: 0,
7403 is_primary: true,
7404 ..Default::default()
7405 }
7406 },
7407 DiagnosticEntry {
7408 range: Point::new(1, 8)..Point::new(1, 9),
7409 diagnostic: Diagnostic {
7410 severity: DiagnosticSeverity::HINT,
7411 message: "error 1 hint 1".to_string(),
7412 group_id: 0,
7413 is_primary: false,
7414 ..Default::default()
7415 }
7416 },
7417 ]
7418 );
7419 assert_eq!(
7420 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7421 &[
7422 DiagnosticEntry {
7423 range: Point::new(1, 13)..Point::new(1, 15),
7424 diagnostic: Diagnostic {
7425 severity: DiagnosticSeverity::HINT,
7426 message: "error 2 hint 1".to_string(),
7427 group_id: 1,
7428 is_primary: false,
7429 ..Default::default()
7430 }
7431 },
7432 DiagnosticEntry {
7433 range: Point::new(1, 13)..Point::new(1, 15),
7434 diagnostic: Diagnostic {
7435 severity: DiagnosticSeverity::HINT,
7436 message: "error 2 hint 2".to_string(),
7437 group_id: 1,
7438 is_primary: false,
7439 ..Default::default()
7440 }
7441 },
7442 DiagnosticEntry {
7443 range: Point::new(2, 8)..Point::new(2, 17),
7444 diagnostic: Diagnostic {
7445 severity: DiagnosticSeverity::ERROR,
7446 message: "error 2".to_string(),
7447 group_id: 1,
7448 is_primary: true,
7449 ..Default::default()
7450 }
7451 }
7452 ]
7453 );
7454 }
7455
7456 #[gpui::test]
7457 async fn test_rename(cx: &mut gpui::TestAppContext) {
7458 cx.foreground().forbid_parking();
7459
7460 let mut language = Language::new(
7461 LanguageConfig {
7462 name: "Rust".into(),
7463 path_suffixes: vec!["rs".to_string()],
7464 ..Default::default()
7465 },
7466 Some(tree_sitter_rust::language()),
7467 );
7468 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7469 capabilities: lsp::ServerCapabilities {
7470 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7471 prepare_provider: Some(true),
7472 work_done_progress_options: Default::default(),
7473 })),
7474 ..Default::default()
7475 },
7476 ..Default::default()
7477 });
7478
7479 let fs = FakeFs::new(cx.background());
7480 fs.insert_tree(
7481 "/dir",
7482 json!({
7483 "one.rs": "const ONE: usize = 1;",
7484 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7485 }),
7486 )
7487 .await;
7488
7489 let project = Project::test(fs.clone(), cx);
7490 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7491
7492 let (tree, _) = project
7493 .update(cx, |project, cx| {
7494 project.find_or_create_local_worktree("/dir", true, cx)
7495 })
7496 .await
7497 .unwrap();
7498 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7499 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7500 .await;
7501
7502 let buffer = project
7503 .update(cx, |project, cx| {
7504 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7505 })
7506 .await
7507 .unwrap();
7508
7509 let fake_server = fake_servers.next().await.unwrap();
7510
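        // Prepare a rename at offset 7, which falls inside the identifier `ONE`.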
7511 let response = project.update(cx, |project, cx| {
7512 project.prepare_rename(buffer.clone(), 7, cx)
7513 });
7514 fake_server
7515 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7516 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7517 assert_eq!(params.position, lsp::Position::new(0, 7));
7518 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7519 lsp::Position::new(0, 6),
7520 lsp::Position::new(0, 9),
7521 ))))
7522 })
7523 .next()
7524 .await
7525 .unwrap();
7526 let range = response.await.unwrap().unwrap();
7527 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7528 assert_eq!(range, 6..9);
7529
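        // Perform the rename; the fake server responds with a workspace edit that
        // touches both one.rs and two.rs.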
7530 let response = project.update(cx, |project, cx| {
7531 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7532 });
7533 fake_server
7534 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7535 assert_eq!(
7536 params.text_document_position.text_document.uri.as_str(),
7537 "file:///dir/one.rs"
7538 );
7539 assert_eq!(
7540 params.text_document_position.position,
7541 lsp::Position::new(0, 7)
7542 );
7543 assert_eq!(params.new_name, "THREE");
7544 Ok(Some(lsp::WorkspaceEdit {
7545 changes: Some(
7546 [
7547 (
7548 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7549 vec![lsp::TextEdit::new(
7550 lsp::Range::new(
7551 lsp::Position::new(0, 6),
7552 lsp::Position::new(0, 9),
7553 ),
7554 "THREE".to_string(),
7555 )],
7556 ),
7557 (
7558 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7559 vec![
7560 lsp::TextEdit::new(
7561 lsp::Range::new(
7562 lsp::Position::new(0, 24),
7563 lsp::Position::new(0, 27),
7564 ),
7565 "THREE".to_string(),
7566 ),
7567 lsp::TextEdit::new(
7568 lsp::Range::new(
7569 lsp::Position::new(0, 35),
7570 lsp::Position::new(0, 38),
7571 ),
7572 "THREE".to_string(),
7573 ),
7574 ],
7575 ),
7576 ]
7577 .into_iter()
7578 .collect(),
7579 ),
7580 ..Default::default()
7581 }))
7582 })
7583 .next()
7584 .await
7585 .unwrap();
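        // The resulting project transaction should contain an entry for every buffer
        // that was edited.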
7586 let mut transaction = response.await.unwrap().0;
7587 assert_eq!(transaction.len(), 2);
7588 assert_eq!(
7589 transaction
7590 .remove_entry(&buffer)
7591 .unwrap()
7592 .0
7593 .read_with(cx, |buffer, _| buffer.text()),
7594 "const THREE: usize = 1;"
7595 );
7596 assert_eq!(
7597 transaction
7598 .into_keys()
7599 .next()
7600 .unwrap()
7601 .read_with(cx, |buffer, _| buffer.text()),
7602 "const TWO: usize = one::THREE + one::THREE;"
7603 );
7604 }
7605
7606 #[gpui::test]
7607 async fn test_search(cx: &mut gpui::TestAppContext) {
7608 let fs = FakeFs::new(cx.background());
7609 fs.insert_tree(
7610 "/dir",
7611 json!({
7612 "one.rs": "const ONE: usize = 1;",
7613 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7614 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7615 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7616 }),
7617 )
7618 .await;
7619 let project = Project::test(fs.clone(), cx);
7620 let (tree, _) = project
7621 .update(cx, |project, cx| {
7622 project.find_or_create_local_worktree("/dir", true, cx)
7623 })
7624 .await
7625 .unwrap();
7626 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7627 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7628 .await;
7629
7630 assert_eq!(
7631 search(&project, SearchQuery::text("TWO", false, true), cx)
7632 .await
7633 .unwrap(),
7634 HashMap::from_iter([
7635 ("two.rs".to_string(), vec![6..9]),
7636 ("three.rs".to_string(), vec![37..40])
7637 ])
7638 );
7639
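        // Edit an open buffer so that the next search reflects its unsaved, in-memory
        // contents.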
7640 let buffer_4 = project
7641 .update(cx, |project, cx| {
7642 project.open_buffer((worktree_id, "four.rs"), cx)
7643 })
7644 .await
7645 .unwrap();
7646 buffer_4.update(cx, |buffer, cx| {
7647 buffer.edit([20..28, 31..43], "two::TWO", cx);
7648 });
7649
7650 assert_eq!(
7651 search(&project, SearchQuery::text("TWO", false, true), cx)
7652 .await
7653 .unwrap(),
7654 HashMap::from_iter([
7655 ("two.rs".to_string(), vec![6..9]),
7656 ("three.rs".to_string(), vec![37..40]),
7657 ("four.rs".to_string(), vec![25..28, 36..39])
7658 ])
7659 );
7660
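        // Runs a project-wide search and converts the results into (path, offset ranges)
        // pairs to simplify the assertions above.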
7661 async fn search(
7662 project: &ModelHandle<Project>,
7663 query: SearchQuery,
7664 cx: &mut gpui::TestAppContext,
7665 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7666 let results = project
7667 .update(cx, |project, cx| project.search(query, cx))
7668 .await?;
7669
7670 Ok(results
7671 .into_iter()
7672 .map(|(buffer, ranges)| {
7673 buffer.read_with(cx, |buffer, _| {
7674 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7675 let ranges = ranges
7676 .into_iter()
7677 .map(|range| range.to_offset(buffer))
7678 .collect::<Vec<_>>();
7679 (path, ranges)
7680 })
7681 })
7682 .collect())
7683 }
7684 }
7685}