1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToLspPosition,
22 ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
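/// A model that can be associated with a project entry, such as a buffer opened from a file.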
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
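/// A collection of worktrees plus the state needed to collaborate on them: open buffers,
/// language servers, collaborators, and the client connection used to share this project
/// or to join a remote one.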
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
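/// How the project holds an open buffer. `Strong` handles keep the buffer alive (used while
/// the project is shared or remote), `Weak` handles let it drop when unused, and `Loading`
/// queues operations that arrive before the buffer has finished opening.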
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
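/// Like `OpenBuffer`, worktrees are held strongly while the project is shared and weakly
/// otherwise, so hidden worktrees can be released once they are no longer referenced.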
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
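/// Whether this project runs locally (and may be shared with guests) or is a remote
/// project that this client has joined as a guest.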
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
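/// Events emitted by a `Project` for its observers to react to.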
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128 CollaboratorLeft(PeerId),
129}
130
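/// Events forwarded from a language server's notification callbacks to the main thread,
/// where they are handled by `on_lsp_event`.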
131enum LanguageServerEvent {
132 WorkStart {
133 token: String,
134 },
135 WorkProgress {
136 token: String,
137 progress: LanguageServerProgress,
138 },
139 WorkEnd {
140 token: String,
141 },
142 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
143}
144
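/// Progress and diagnostic state reported by a running language server, keyed by server id
/// in `Project::language_server_statuses`.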
145pub struct LanguageServerStatus {
146 pub name: String,
147 pub pending_work: BTreeMap<String, LanguageServerProgress>,
148 pending_diagnostic_updates: isize,
149}
150
151#[derive(Clone, Debug)]
152pub struct LanguageServerProgress {
153 pub message: Option<String>,
154 pub percentage: Option<usize>,
155 pub last_update_at: Instant,
156}
157
158#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
159pub struct ProjectPath {
160 pub worktree_id: WorktreeId,
161 pub path: Arc<Path>,
162}
163
164#[derive(Clone, Debug, Default, PartialEq)]
165pub struct DiagnosticSummary {
166 pub error_count: usize,
167 pub warning_count: usize,
168 pub info_count: usize,
169 pub hint_count: usize,
170}
171
172#[derive(Debug)]
173pub struct Location {
174 pub buffer: ModelHandle<Buffer>,
175 pub range: Range<language::Anchor>,
176}
177
178#[derive(Debug)]
179pub struct DocumentHighlight {
180 pub range: Range<language::Anchor>,
181 pub kind: DocumentHighlightKind,
182}
183
184#[derive(Clone, Debug)]
185pub struct Symbol {
186 pub source_worktree_id: WorktreeId,
187 pub worktree_id: WorktreeId,
188 pub language_name: String,
189 pub path: PathBuf,
190 pub label: CodeLabel,
191 pub name: String,
192 pub kind: lsp::SymbolKind,
193 pub range: Range<PointUtf16>,
194 pub signature: [u8; 32],
195}
196
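/// The buffer transactions produced by a project-wide operation such as formatting or
/// applying a code action, keyed by the buffer they apply to.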
197#[derive(Default)]
198pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
235#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
236pub struct ProjectEntryId(usize);
237
238impl ProjectEntryId {
239 pub fn new(counter: &AtomicUsize) -> Self {
240 Self(counter.fetch_add(1, SeqCst))
241 }
242
243 pub fn from_proto(id: u64) -> Self {
244 Self(id as usize)
245 }
246
247 pub fn to_proto(&self) -> u64 {
248 self.0 as u64
249 }
250
251 pub fn to_usize(&self) -> usize {
252 self.0
253 }
254}
255
256impl Project {
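    /// Registers the RPC message and request handlers that let this model respond to
    /// collaborators and to the server.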
257 pub fn init(client: &Arc<Client>) {
258 client.add_model_message_handler(Self::handle_add_collaborator);
259 client.add_model_message_handler(Self::handle_buffer_reloaded);
260 client.add_model_message_handler(Self::handle_buffer_saved);
261 client.add_model_message_handler(Self::handle_start_language_server);
262 client.add_model_message_handler(Self::handle_update_language_server);
263 client.add_model_message_handler(Self::handle_remove_collaborator);
264 client.add_model_message_handler(Self::handle_register_worktree);
265 client.add_model_message_handler(Self::handle_unregister_worktree);
266 client.add_model_message_handler(Self::handle_unshare_project);
267 client.add_model_message_handler(Self::handle_update_buffer_file);
268 client.add_model_message_handler(Self::handle_update_buffer);
269 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
270 client.add_model_message_handler(Self::handle_update_worktree);
271 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
272 client.add_model_request_handler(Self::handle_apply_code_action);
273 client.add_model_request_handler(Self::handle_format_buffers);
274 client.add_model_request_handler(Self::handle_get_code_actions);
275 client.add_model_request_handler(Self::handle_get_completions);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
278 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
280 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
281 client.add_model_request_handler(Self::handle_search_project);
282 client.add_model_request_handler(Self::handle_get_project_symbols);
283 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
284 client.add_model_request_handler(Self::handle_open_buffer_by_id);
285 client.add_model_request_handler(Self::handle_open_buffer_by_path);
286 client.add_model_request_handler(Self::handle_save_buffer);
287 }
288
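    /// Creates a project that operates on the local filesystem. A background task registers
    /// the project with the server whenever the client is connected.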
289 pub fn local(
290 client: Arc<Client>,
291 user_store: ModelHandle<UserStore>,
292 languages: Arc<LanguageRegistry>,
293 fs: Arc<dyn Fs>,
294 cx: &mut MutableAppContext,
295 ) -> ModelHandle<Self> {
296 cx.add_model(|cx: &mut ModelContext<Self>| {
297 let (remote_id_tx, remote_id_rx) = watch::channel();
298 let _maintain_remote_id_task = cx.spawn_weak({
299 let rpc = client.clone();
300 move |this, mut cx| {
301 async move {
302 let mut status = rpc.status();
303 while let Some(status) = status.next().await {
304 if let Some(this) = this.upgrade(&cx) {
305 if status.is_connected() {
306 this.update(&mut cx, |this, cx| this.register(cx)).await?;
307 } else {
308 this.update(&mut cx, |this, cx| this.unregister(cx));
309 }
310 }
311 }
312 Ok(())
313 }
314 .log_err()
315 }
316 });
317
318 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
319 Self {
320 worktrees: Default::default(),
321 collaborators: Default::default(),
322 opened_buffers: Default::default(),
323 shared_buffers: Default::default(),
324 loading_buffers: Default::default(),
325 loading_local_worktrees: Default::default(),
326 buffer_snapshots: Default::default(),
327 client_state: ProjectClientState::Local {
328 is_shared: false,
329 remote_id_tx,
330 remote_id_rx,
331 _maintain_remote_id_task,
332 },
333 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
334 subscriptions: Vec::new(),
335 active_entry: None,
336 languages,
337 client,
338 user_store,
339 fs,
340 next_entry_id: Default::default(),
341 language_servers_with_diagnostics_running: 0,
342 language_servers: Default::default(),
343 started_language_servers: Default::default(),
344 language_server_statuses: Default::default(),
345 language_server_settings: Default::default(),
346 next_language_server_id: 0,
347 nonce: StdRng::from_entropy().gen(),
348 }
349 })
350 }
351
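    /// Joins a project shared by another user, replicating its worktrees, collaborators, and
    /// language server statuses from the server.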
352 pub async fn remote(
353 remote_id: u64,
354 client: Arc<Client>,
355 user_store: ModelHandle<UserStore>,
356 languages: Arc<LanguageRegistry>,
357 fs: Arc<dyn Fs>,
358 cx: &mut AsyncAppContext,
359 ) -> Result<ModelHandle<Self>> {
360 client.authenticate_and_connect(true, &cx).await?;
361
362 let response = client
363 .request(proto::JoinProject {
364 project_id: remote_id,
365 })
366 .await?;
367
368 let replica_id = response.replica_id as ReplicaId;
369
370 let mut worktrees = Vec::new();
371 for worktree in response.worktrees {
372 let (worktree, load_task) = cx
373 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
374 worktrees.push(worktree);
375 load_task.detach();
376 }
377
378 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
379 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
380 let mut this = Self {
381 worktrees: Vec::new(),
382 loading_buffers: Default::default(),
383 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
384 shared_buffers: Default::default(),
385 loading_local_worktrees: Default::default(),
386 active_entry: None,
387 collaborators: Default::default(),
388 languages,
389 user_store: user_store.clone(),
390 fs,
391 next_entry_id: Default::default(),
392 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
393 client: client.clone(),
394 client_state: ProjectClientState::Remote {
395 sharing_has_stopped: false,
396 remote_id,
397 replica_id,
398 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
399 async move {
400 let mut status = client.status();
401 let is_connected =
402 status.next().await.map_or(false, |s| s.is_connected());
                        // Even if we're initially connected, any subsequent status change means we were momentarily disconnected.
404 if !is_connected || status.next().await.is_some() {
405 if let Some(this) = this.upgrade(&cx) {
406 this.update(&mut cx, |this, cx| this.project_unshared(cx))
407 }
408 }
409 Ok(())
410 }
411 .log_err()
412 }),
413 },
414 language_servers_with_diagnostics_running: 0,
415 language_servers: Default::default(),
416 started_language_servers: Default::default(),
417 language_server_settings: Default::default(),
418 language_server_statuses: response
419 .language_servers
420 .into_iter()
421 .map(|server| {
422 (
423 server.id as usize,
424 LanguageServerStatus {
425 name: server.name,
426 pending_work: Default::default(),
427 pending_diagnostic_updates: 0,
428 },
429 )
430 })
431 .collect(),
432 next_language_server_id: 0,
433 opened_buffers: Default::default(),
434 buffer_snapshots: Default::default(),
435 nonce: StdRng::from_entropy().gen(),
436 };
437 for worktree in worktrees {
438 this.add_worktree(&worktree, cx);
439 }
440 this
441 });
442
443 let user_ids = response
444 .collaborators
445 .iter()
446 .map(|peer| peer.user_id)
447 .collect();
448 user_store
449 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
450 .await?;
451 let mut collaborators = HashMap::default();
452 for message in response.collaborators {
453 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
454 collaborators.insert(collaborator.peer_id, collaborator);
455 }
456
457 this.update(cx, |this, _| {
458 this.collaborators = collaborators;
459 });
460
461 Ok(this)
462 }
463
464 #[cfg(any(test, feature = "test-support"))]
465 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
466 let languages = Arc::new(LanguageRegistry::test());
467 let http_client = client::test::FakeHttpClient::with_404_response();
468 let client = client::Client::new(http_client.clone());
469 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
470 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
471 }
472
473 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
474 self.opened_buffers
475 .get(&remote_id)
476 .and_then(|buffer| buffer.upgrade(cx))
477 }
478
479 #[cfg(any(test, feature = "test-support"))]
480 pub fn languages(&self) -> &Arc<LanguageRegistry> {
481 &self.languages
482 }
483
484 #[cfg(any(test, feature = "test-support"))]
485 pub fn check_invariants(&self, cx: &AppContext) {
486 if self.is_local() {
487 let mut worktree_root_paths = HashMap::default();
488 for worktree in self.worktrees(cx) {
489 let worktree = worktree.read(cx);
490 let abs_path = worktree.as_local().unwrap().abs_path().clone();
491 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
492 assert_eq!(
493 prev_worktree_id,
494 None,
495 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
496 abs_path,
497 worktree.id(),
498 prev_worktree_id
499 )
500 }
501 } else {
502 let replica_id = self.replica_id();
503 for buffer in self.opened_buffers.values() {
504 if let Some(buffer) = buffer.upgrade(cx) {
505 let buffer = buffer.read(cx);
506 assert_eq!(
507 buffer.deferred_ops_len(),
508 0,
509 "replica {}, buffer {} has deferred operations",
510 replica_id,
511 buffer.remote_id()
512 );
513 }
514 }
515 }
516 }
517
518 #[cfg(any(test, feature = "test-support"))]
519 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
520 let path = path.into();
521 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
522 self.opened_buffers.iter().any(|(_, buffer)| {
523 if let Some(buffer) = buffer.upgrade(cx) {
524 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
525 if file.worktree == worktree && file.path() == &path.path {
526 return true;
527 }
528 }
529 }
530 false
531 })
532 } else {
533 false
534 }
535 }
536
537 pub fn fs(&self) -> &Arc<dyn Fs> {
538 &self.fs
539 }
540
541 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
542 self.unshare(cx);
543 for worktree in &self.worktrees {
544 if let Some(worktree) = worktree.upgrade(cx) {
545 worktree.update(cx, |worktree, _| {
546 worktree.as_local_mut().unwrap().unregister();
547 });
548 }
549 }
550
551 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
552 *remote_id_tx.borrow_mut() = None;
553 }
554
555 self.subscriptions.clear();
556 }
557
558 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
559 self.unregister(cx);
560
561 let response = self.client.request(proto::RegisterProject {});
562 cx.spawn(|this, mut cx| async move {
563 let remote_id = response.await?.project_id;
564
565 let mut registrations = Vec::new();
566 this.update(&mut cx, |this, cx| {
567 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
568 *remote_id_tx.borrow_mut() = Some(remote_id);
569 }
570
571 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
572
573 this.subscriptions
574 .push(this.client.add_model_for_remote_entity(remote_id, cx));
575
576 for worktree in &this.worktrees {
577 if let Some(worktree) = worktree.upgrade(cx) {
578 registrations.push(worktree.update(cx, |worktree, cx| {
579 let worktree = worktree.as_local_mut().unwrap();
580 worktree.register(remote_id, cx)
581 }));
582 }
583 }
584 });
585
586 futures::future::try_join_all(registrations).await?;
587 Ok(())
588 })
589 }
590
591 pub fn remote_id(&self) -> Option<u64> {
592 match &self.client_state {
593 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
594 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
595 }
596 }
597
598 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
599 let mut id = None;
600 let mut watch = None;
601 match &self.client_state {
602 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
603 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
604 }
605
606 async move {
607 if let Some(id) = id {
608 return id;
609 }
610 let mut watch = watch.unwrap();
611 loop {
612 let id = *watch.borrow();
613 if let Some(id) = id {
614 return id;
615 }
616 watch.next().await;
617 }
618 }
619 }
620
621 pub fn replica_id(&self) -> ReplicaId {
622 match &self.client_state {
623 ProjectClientState::Local { .. } => 0,
624 ProjectClientState::Remote { replica_id, .. } => *replica_id,
625 }
626 }
627
628 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
629 &self.collaborators
630 }
631
632 pub fn worktrees<'a>(
633 &'a self,
634 cx: &'a AppContext,
635 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
636 self.worktrees
637 .iter()
638 .filter_map(move |worktree| worktree.upgrade(cx))
639 }
640
641 pub fn visible_worktrees<'a>(
642 &'a self,
643 cx: &'a AppContext,
644 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
645 self.worktrees.iter().filter_map(|worktree| {
646 worktree.upgrade(cx).and_then(|worktree| {
647 if worktree.read(cx).is_visible() {
648 Some(worktree)
649 } else {
650 None
651 }
652 })
653 })
654 }
655
656 pub fn worktree_for_id(
657 &self,
658 id: WorktreeId,
659 cx: &AppContext,
660 ) -> Option<ModelHandle<Worktree>> {
661 self.worktrees(cx)
662 .find(|worktree| worktree.read(cx).id() == id)
663 }
664
665 pub fn worktree_for_entry(
666 &self,
667 entry_id: ProjectEntryId,
668 cx: &AppContext,
669 ) -> Option<ModelHandle<Worktree>> {
670 self.worktrees(cx)
671 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
672 }
673
674 pub fn worktree_id_for_entry(
675 &self,
676 entry_id: ProjectEntryId,
677 cx: &AppContext,
678 ) -> Option<WorktreeId> {
679 self.worktree_for_entry(entry_id, cx)
680 .map(|worktree| worktree.read(cx).id())
681 }
682
683 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
684 let rpc = self.client.clone();
685 cx.spawn(|this, mut cx| async move {
686 let project_id = this.update(&mut cx, |this, cx| {
687 if let ProjectClientState::Local {
688 is_shared,
689 remote_id_rx,
690 ..
691 } = &mut this.client_state
692 {
693 *is_shared = true;
694
695 for open_buffer in this.opened_buffers.values_mut() {
696 match open_buffer {
697 OpenBuffer::Strong(_) => {}
698 OpenBuffer::Weak(buffer) => {
699 if let Some(buffer) = buffer.upgrade(cx) {
700 *open_buffer = OpenBuffer::Strong(buffer);
701 }
702 }
703 OpenBuffer::Loading(_) => unreachable!(),
704 }
705 }
706
707 for worktree_handle in this.worktrees.iter_mut() {
708 match worktree_handle {
709 WorktreeHandle::Strong(_) => {}
710 WorktreeHandle::Weak(worktree) => {
711 if let Some(worktree) = worktree.upgrade(cx) {
712 *worktree_handle = WorktreeHandle::Strong(worktree);
713 }
714 }
715 }
716 }
717
718 remote_id_rx
719 .borrow()
720 .ok_or_else(|| anyhow!("no project id"))
721 } else {
722 Err(anyhow!("can't share a remote project"))
723 }
724 })?;
725
726 rpc.request(proto::ShareProject { project_id }).await?;
727
728 let mut tasks = Vec::new();
729 this.update(&mut cx, |this, cx| {
730 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
731 worktree.update(cx, |worktree, cx| {
732 let worktree = worktree.as_local_mut().unwrap();
733 tasks.push(worktree.share(project_id, cx));
734 });
735 }
736 });
737 for task in tasks {
738 task.await?;
739 }
740 this.update(&mut cx, |_, cx| cx.notify());
741 Ok(())
742 })
743 }
744
745 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
746 let rpc = self.client.clone();
747
748 if let ProjectClientState::Local {
749 is_shared,
750 remote_id_rx,
751 ..
752 } = &mut self.client_state
753 {
754 if !*is_shared {
755 return;
756 }
757
758 *is_shared = false;
759 self.collaborators.clear();
760 self.shared_buffers.clear();
761 for worktree_handle in self.worktrees.iter_mut() {
762 if let WorktreeHandle::Strong(worktree) = worktree_handle {
763 let is_visible = worktree.update(cx, |worktree, _| {
764 worktree.as_local_mut().unwrap().unshare();
765 worktree.is_visible()
766 });
767 if !is_visible {
768 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
769 }
770 }
771 }
772
773 for open_buffer in self.opened_buffers.values_mut() {
774 match open_buffer {
775 OpenBuffer::Strong(buffer) => {
776 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
777 }
778 _ => {}
779 }
780 }
781
782 if let Some(project_id) = *remote_id_rx.borrow() {
783 rpc.send(proto::UnshareProject { project_id }).log_err();
784 }
785
786 cx.notify();
787 } else {
788 log::error!("attempted to unshare a remote project");
789 }
790 }
791
792 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
793 if let ProjectClientState::Remote {
794 sharing_has_stopped,
795 ..
796 } = &mut self.client_state
797 {
798 *sharing_has_stopped = true;
799 self.collaborators.clear();
800 cx.notify();
801 }
802 }
803
804 pub fn is_read_only(&self) -> bool {
805 match &self.client_state {
806 ProjectClientState::Local { .. } => false,
807 ProjectClientState::Remote {
808 sharing_has_stopped,
809 ..
810 } => *sharing_has_stopped,
811 }
812 }
813
814 pub fn is_local(&self) -> bool {
815 match &self.client_state {
816 ProjectClientState::Local { .. } => true,
817 ProjectClientState::Remote { .. } => false,
818 }
819 }
820
821 pub fn is_remote(&self) -> bool {
822 !self.is_local()
823 }
824
825 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
826 if self.is_remote() {
827 return Err(anyhow!("creating buffers as a guest is not supported yet"));
828 }
829
830 let buffer = cx.add_model(|cx| {
831 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
832 });
833 self.register_buffer(&buffer, cx)?;
834 Ok(buffer)
835 }
836
837 pub fn open_path(
838 &mut self,
839 path: impl Into<ProjectPath>,
840 cx: &mut ModelContext<Self>,
841 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
842 let task = self.open_buffer(path, cx);
843 cx.spawn_weak(|_, cx| async move {
844 let buffer = task.await?;
845 let project_entry_id = buffer
846 .read_with(&cx, |buffer, cx| {
847 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
848 })
849 .ok_or_else(|| anyhow!("no project entry"))?;
850 Ok((project_entry_id, buffer.into()))
851 })
852 }
853
854 pub fn open_buffer(
855 &mut self,
856 path: impl Into<ProjectPath>,
857 cx: &mut ModelContext<Self>,
858 ) -> Task<Result<ModelHandle<Buffer>>> {
859 let project_path = path.into();
860 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
861 worktree
862 } else {
863 return Task::ready(Err(anyhow!("no such worktree")));
864 };
865
866 // If there is already a buffer for the given path, then return it.
867 let existing_buffer = self.get_open_buffer(&project_path, cx);
868 if let Some(existing_buffer) = existing_buffer {
869 return Task::ready(Ok(existing_buffer));
870 }
871
872 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
873 // If the given path is already being loaded, then wait for that existing
874 // task to complete and return the same buffer.
875 hash_map::Entry::Occupied(e) => e.get().clone(),
876
877 // Otherwise, record the fact that this path is now being loaded.
878 hash_map::Entry::Vacant(entry) => {
879 let (mut tx, rx) = postage::watch::channel();
880 entry.insert(rx.clone());
881
882 let load_buffer = if worktree.read(cx).is_local() {
883 self.open_local_buffer(&project_path.path, &worktree, cx)
884 } else {
885 self.open_remote_buffer(&project_path.path, &worktree, cx)
886 };
887
888 cx.spawn(move |this, mut cx| async move {
889 let load_result = load_buffer.await;
890 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
891 // Record the fact that the buffer is no longer loading.
892 this.loading_buffers.remove(&project_path);
893 let buffer = load_result.map_err(Arc::new)?;
894 Ok(buffer)
895 }));
896 })
897 .detach();
898 rx
899 }
900 };
901
902 cx.foreground().spawn(async move {
903 loop {
904 if let Some(result) = loading_watch.borrow().as_ref() {
905 match result {
906 Ok(buffer) => return Ok(buffer.clone()),
907 Err(error) => return Err(anyhow!("{}", error)),
908 }
909 }
910 loading_watch.next().await;
911 }
912 })
913 }
914
915 fn open_local_buffer(
916 &mut self,
917 path: &Arc<Path>,
918 worktree: &ModelHandle<Worktree>,
919 cx: &mut ModelContext<Self>,
920 ) -> Task<Result<ModelHandle<Buffer>>> {
921 let load_buffer = worktree.update(cx, |worktree, cx| {
922 let worktree = worktree.as_local_mut().unwrap();
923 worktree.load_buffer(path, cx)
924 });
925 cx.spawn(|this, mut cx| async move {
926 let buffer = load_buffer.await?;
927 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
928 Ok(buffer)
929 })
930 }
931
932 fn open_remote_buffer(
933 &mut self,
934 path: &Arc<Path>,
935 worktree: &ModelHandle<Worktree>,
936 cx: &mut ModelContext<Self>,
937 ) -> Task<Result<ModelHandle<Buffer>>> {
938 let rpc = self.client.clone();
939 let project_id = self.remote_id().unwrap();
940 let remote_worktree_id = worktree.read(cx).id();
941 let path = path.clone();
942 let path_string = path.to_string_lossy().to_string();
943 cx.spawn(|this, mut cx| async move {
944 let response = rpc
945 .request(proto::OpenBufferByPath {
946 project_id,
947 worktree_id: remote_worktree_id.to_proto(),
948 path: path_string,
949 })
950 .await?;
951 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
952 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
953 .await
954 })
955 }
956
957 fn open_local_buffer_via_lsp(
958 &mut self,
959 abs_path: lsp::Url,
960 lang_name: Arc<str>,
961 lang_server: Arc<LanguageServer>,
962 cx: &mut ModelContext<Self>,
963 ) -> Task<Result<ModelHandle<Buffer>>> {
964 cx.spawn(|this, mut cx| async move {
965 let abs_path = abs_path
966 .to_file_path()
967 .map_err(|_| anyhow!("can't convert URI to path"))?;
968 let (worktree, relative_path) = if let Some(result) =
969 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
970 {
971 result
972 } else {
973 let worktree = this
974 .update(&mut cx, |this, cx| {
975 this.create_local_worktree(&abs_path, false, cx)
976 })
977 .await?;
978 this.update(&mut cx, |this, cx| {
979 this.language_servers
980 .insert((worktree.read(cx).id(), lang_name), lang_server);
981 });
982 (worktree, PathBuf::new())
983 };
984
985 let project_path = ProjectPath {
986 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
987 path: relative_path.into(),
988 };
989 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
990 .await
991 })
992 }
993
994 pub fn open_buffer_by_id(
995 &mut self,
996 id: u64,
997 cx: &mut ModelContext<Self>,
998 ) -> Task<Result<ModelHandle<Buffer>>> {
999 if let Some(buffer) = self.buffer_for_id(id, cx) {
1000 Task::ready(Ok(buffer))
1001 } else if self.is_local() {
1002 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1003 } else if let Some(project_id) = self.remote_id() {
1004 let request = self
1005 .client
1006 .request(proto::OpenBufferById { project_id, id });
1007 cx.spawn(|this, mut cx| async move {
1008 let buffer = request
1009 .await?
1010 .buffer
1011 .ok_or_else(|| anyhow!("invalid buffer"))?;
1012 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1013 .await
1014 })
1015 } else {
1016 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1017 }
1018 }
1019
1020 pub fn save_buffer_as(
1021 &mut self,
1022 buffer: ModelHandle<Buffer>,
1023 abs_path: PathBuf,
1024 cx: &mut ModelContext<Project>,
1025 ) -> Task<Result<()>> {
1026 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1027 cx.spawn(|this, mut cx| async move {
1028 let (worktree, path) = worktree_task.await?;
1029 worktree
1030 .update(&mut cx, |worktree, cx| {
1031 worktree
1032 .as_local_mut()
1033 .unwrap()
1034 .save_buffer_as(buffer.clone(), path, cx)
1035 })
1036 .await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.assign_language_to_buffer(&buffer, cx);
1039 this.register_buffer_with_language_server(&buffer, cx);
1040 });
1041 Ok(())
1042 })
1043 }
1044
1045 pub fn get_open_buffer(
1046 &mut self,
1047 path: &ProjectPath,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Option<ModelHandle<Buffer>> {
1050 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1051 self.opened_buffers.values().find_map(|buffer| {
1052 let buffer = buffer.upgrade(cx)?;
1053 let file = File::from_dyn(buffer.read(cx).file())?;
1054 if file.worktree == worktree && file.path() == &path.path {
1055 Some(buffer)
1056 } else {
1057 None
1058 }
1059 })
1060 }
1061
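    /// Starts tracking an open buffer: applies any operations that arrived while it was
    /// loading, assigns a language, and registers it with the language server.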
1062 fn register_buffer(
1063 &mut self,
1064 buffer: &ModelHandle<Buffer>,
1065 cx: &mut ModelContext<Self>,
1066 ) -> Result<()> {
1067 let remote_id = buffer.read(cx).remote_id();
1068 let open_buffer = if self.is_remote() || self.is_shared() {
1069 OpenBuffer::Strong(buffer.clone())
1070 } else {
1071 OpenBuffer::Weak(buffer.downgrade())
1072 };
1073
1074 match self.opened_buffers.insert(remote_id, open_buffer) {
1075 None => {}
1076 Some(OpenBuffer::Loading(operations)) => {
1077 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1078 }
1079 Some(OpenBuffer::Weak(existing_handle)) => {
1080 if existing_handle.upgrade(cx).is_some() {
1081 Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?
1085 }
1086 }
1087 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?,
1091 }
1092 cx.subscribe(buffer, |this, buffer, event, cx| {
1093 this.on_buffer_event(buffer, event, cx);
1094 })
1095 .detach();
1096
1097 self.assign_language_to_buffer(buffer, cx);
1098 self.register_buffer_with_language_server(buffer, cx);
1099
1100 Ok(())
1101 }
1102
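    /// Sends `textDocument/didOpen` for a newly registered local buffer and arranges for
    /// `didClose` to be sent when the buffer is released.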
1103 fn register_buffer_with_language_server(
1104 &mut self,
1105 buffer_handle: &ModelHandle<Buffer>,
1106 cx: &mut ModelContext<Self>,
1107 ) {
1108 let buffer = buffer_handle.read(cx);
1109 let buffer_id = buffer.remote_id();
1110 if let Some(file) = File::from_dyn(buffer.file()) {
1111 if file.is_local() {
1112 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1113 let initial_snapshot = buffer.text_snapshot();
1114 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1115
1116 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1117 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1118 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1119 .log_err();
1120 }
1121 }
1122
1123 if let Some(server) = language_server {
1124 server
1125 .notify::<lsp::notification::DidOpenTextDocument>(
1126 lsp::DidOpenTextDocumentParams {
1127 text_document: lsp::TextDocumentItem::new(
1128 uri,
1129 Default::default(),
1130 0,
1131 initial_snapshot.text(),
1132 ),
1133 }
1134 .clone(),
1135 )
1136 .log_err();
1137 buffer_handle.update(cx, |buffer, cx| {
1138 buffer.set_completion_triggers(
1139 server
1140 .capabilities()
1141 .completion_provider
1142 .as_ref()
1143 .and_then(|provider| provider.trigger_characters.clone())
1144 .unwrap_or(Vec::new()),
1145 cx,
1146 )
1147 });
1148 self.buffer_snapshots
1149 .insert(buffer_id, vec![(0, initial_snapshot)]);
1150 }
1151
1152 cx.observe_release(buffer_handle, |this, buffer, cx| {
1153 if let Some(file) = File::from_dyn(buffer.file()) {
1154 if file.is_local() {
1155 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1156 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1157 server
1158 .notify::<lsp::notification::DidCloseTextDocument>(
1159 lsp::DidCloseTextDocumentParams {
1160 text_document: lsp::TextDocumentIdentifier::new(
1161 uri.clone(),
1162 ),
1163 },
1164 )
1165 .log_err();
1166 }
1167 }
1168 }
1169 })
1170 .detach();
1171 }
1172 }
1173 }
1174
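    /// Reacts to buffer events: broadcasts operations to collaborators and keeps the
    /// language server in sync by sending `didChange` and `didSave` notifications.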
1175 fn on_buffer_event(
1176 &mut self,
1177 buffer: ModelHandle<Buffer>,
1178 event: &BufferEvent,
1179 cx: &mut ModelContext<Self>,
1180 ) -> Option<()> {
1181 match event {
1182 BufferEvent::Operation(operation) => {
1183 let project_id = self.remote_id()?;
1184 let request = self.client.request(proto::UpdateBuffer {
1185 project_id,
1186 buffer_id: buffer.read(cx).remote_id(),
1187 operations: vec![language::proto::serialize_operation(&operation)],
1188 });
1189 cx.background().spawn(request).detach_and_log_err(cx);
1190 }
1191 BufferEvent::Edited { .. } => {
1192 let language_server = self
1193 .language_server_for_buffer(buffer.read(cx), cx)?
1194 .clone();
1195 let buffer = buffer.read(cx);
1196 let file = File::from_dyn(buffer.file())?;
1197 let abs_path = file.as_local()?.abs_path(cx);
1198 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1199 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1200 let (version, prev_snapshot) = buffer_snapshots.last()?;
1201 let next_snapshot = buffer.text_snapshot();
1202 let next_version = version + 1;
1203
1204 let content_changes = buffer
1205 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1206 .map(|edit| {
1207 let edit_start = edit.new.start.0;
1208 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1209 let new_text = next_snapshot
1210 .text_for_range(edit.new.start.1..edit.new.end.1)
1211 .collect();
1212 lsp::TextDocumentContentChangeEvent {
1213 range: Some(lsp::Range::new(
1214 edit_start.to_lsp_position(),
1215 edit_end.to_lsp_position(),
1216 )),
1217 range_length: None,
1218 text: new_text,
1219 }
1220 })
1221 .collect();
1222
1223 buffer_snapshots.push((next_version, next_snapshot));
1224
1225 language_server
1226 .notify::<lsp::notification::DidChangeTextDocument>(
1227 lsp::DidChangeTextDocumentParams {
1228 text_document: lsp::VersionedTextDocumentIdentifier::new(
1229 uri,
1230 next_version,
1231 ),
1232 content_changes,
1233 },
1234 )
1235 .log_err();
1236 }
1237 BufferEvent::Saved => {
1238 let file = File::from_dyn(buffer.read(cx).file())?;
1239 let worktree_id = file.worktree_id(cx);
1240 let abs_path = file.as_local()?.abs_path(cx);
1241 let text_document = lsp::TextDocumentIdentifier {
1242 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1243 };
1244
1245 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1246 server
1247 .notify::<lsp::notification::DidSaveTextDocument>(
1248 lsp::DidSaveTextDocumentParams {
1249 text_document: text_document.clone(),
1250 text: None,
1251 },
1252 )
1253 .log_err();
1254 }
1255 }
1256 _ => {}
1257 }
1258
1259 None
1260 }
1261
1262 fn language_servers_for_worktree(
1263 &self,
1264 worktree_id: WorktreeId,
1265 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1266 self.language_servers.iter().filter_map(
1267 move |((language_server_worktree_id, language_name), server)| {
1268 if *language_server_worktree_id == worktree_id {
1269 Some((language_name.as_ref(), server))
1270 } else {
1271 None
1272 }
1273 },
1274 )
1275 }
1276
1277 fn assign_language_to_buffer(
1278 &mut self,
1279 buffer: &ModelHandle<Buffer>,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Option<()> {
1282 // If the buffer has a language, set it and start the language server if we haven't already.
1283 let full_path = buffer.read(cx).file()?.full_path(cx);
1284 let language = self.languages.select_language(&full_path)?;
1285 buffer.update(cx, |buffer, cx| {
1286 buffer.set_language(Some(language.clone()), cx);
1287 });
1288
1289 let file = File::from_dyn(buffer.read(cx).file())?;
1290 let worktree = file.worktree.read(cx).as_local()?;
1291 let worktree_id = worktree.id();
1292 let worktree_abs_path = worktree.abs_path().clone();
1293 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1294
1295 None
1296 }
1297
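    /// Starts a language server for the given language (at most once per worktree and
    /// language), wiring its notifications into `LanguageServerEvent`s and informing it of
    /// all matching buffers that are already open.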
1298 fn start_language_server(
1299 &mut self,
1300 worktree_id: WorktreeId,
1301 worktree_path: Arc<Path>,
1302 language: Arc<Language>,
1303 cx: &mut ModelContext<Self>,
1304 ) {
1305 let key = (worktree_id, language.name());
1306 self.started_language_servers
1307 .entry(key.clone())
1308 .or_insert_with(|| {
1309 let server_id = post_inc(&mut self.next_language_server_id);
1310 let language_server = self.languages.start_language_server(
1311 server_id,
1312 language.clone(),
1313 worktree_path,
1314 self.client.http_client(),
1315 cx,
1316 );
1317 cx.spawn_weak(|this, mut cx| async move {
1318 let mut language_server = language_server?.await.log_err()?;
1319 let this = this.upgrade(&cx)?;
1320 let (language_server_events_tx, language_server_events_rx) =
1321 smol::channel::unbounded();
1322
1323 language_server
1324 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1325 let language_server_events_tx = language_server_events_tx.clone();
1326 move |params| {
1327 language_server_events_tx
1328 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1329 .ok();
1330 }
1331 })
1332 .detach();
1333
1334 language_server
1335 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1336 let settings = this
1337 .read_with(&cx, |this, _| this.language_server_settings.clone());
1338 move |params| {
1339 let settings = settings.lock();
1340 Ok(params
1341 .items
1342 .into_iter()
1343 .map(|item| {
1344 if let Some(section) = &item.section {
1345 settings
1346 .get(section)
1347 .cloned()
1348 .unwrap_or(serde_json::Value::Null)
1349 } else {
1350 settings.clone()
1351 }
1352 })
1353 .collect())
1354 }
1355 })
1356 .detach();
1357
1358 language_server
1359 .on_notification::<lsp::notification::Progress, _>(move |params| {
1360 let token = match params.token {
1361 lsp::NumberOrString::String(token) => token,
1362 lsp::NumberOrString::Number(token) => {
1363 log::info!("skipping numeric progress token {}", token);
1364 return;
1365 }
1366 };
1367
1368 match params.value {
1369 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1370 lsp::WorkDoneProgress::Begin(_) => {
1371 language_server_events_tx
1372 .try_send(LanguageServerEvent::WorkStart { token })
1373 .ok();
1374 }
1375 lsp::WorkDoneProgress::Report(report) => {
1376 language_server_events_tx
1377 .try_send(LanguageServerEvent::WorkProgress {
1378 token,
1379 progress: LanguageServerProgress {
1380 message: report.message,
1381 percentage: report
1382 .percentage
1383 .map(|p| p as usize),
1384 last_update_at: Instant::now(),
1385 },
1386 })
1387 .ok();
1388 }
1389 lsp::WorkDoneProgress::End(_) => {
1390 language_server_events_tx
1391 .try_send(LanguageServerEvent::WorkEnd { token })
1392 .ok();
1393 }
1394 },
1395 }
1396 })
1397 .detach();
1398
1399 // Process all the LSP events.
1400 cx.spawn(|mut cx| {
1401 let this = this.downgrade();
1402 async move {
1403 while let Ok(event) = language_server_events_rx.recv().await {
1404 let this = this.upgrade(&cx)?;
1405 this.update(&mut cx, |this, cx| {
1406 this.on_lsp_event(server_id, event, &language, cx)
1407 });
1408
1409 // Don't starve the main thread when lots of events arrive all at once.
1410 smol::future::yield_now().await;
1411 }
1412 Some(())
1413 }
1414 })
1415 .detach();
1416
1417 let language_server = language_server.initialize().await.log_err()?;
1418 this.update(&mut cx, |this, cx| {
1419 this.language_servers
1420 .insert(key.clone(), language_server.clone());
1421 this.language_server_statuses.insert(
1422 server_id,
1423 LanguageServerStatus {
1424 name: language_server.name().to_string(),
1425 pending_work: Default::default(),
1426 pending_diagnostic_updates: 0,
1427 },
1428 );
1429 language_server
1430 .notify::<lsp::notification::DidChangeConfiguration>(
1431 lsp::DidChangeConfigurationParams {
1432 settings: this.language_server_settings.lock().clone(),
1433 },
1434 )
1435 .ok();
1436
1437 if let Some(project_id) = this.remote_id() {
1438 this.client
1439 .send(proto::StartLanguageServer {
1440 project_id,
1441 server: Some(proto::LanguageServer {
1442 id: server_id as u64,
1443 name: language_server.name().to_string(),
1444 }),
1445 })
1446 .log_err();
1447 }
1448
1449 // Tell the language server about every open buffer in the worktree that matches the language.
1450 for buffer in this.opened_buffers.values() {
1451 if let Some(buffer_handle) = buffer.upgrade(cx) {
1452 let buffer = buffer_handle.read(cx);
1453 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1454 file
1455 } else {
1456 continue;
1457 };
1458 let language = if let Some(language) = buffer.language() {
1459 language
1460 } else {
1461 continue;
1462 };
1463 if (file.worktree.read(cx).id(), language.name()) != key {
1464 continue;
1465 }
1466
1467 let file = file.as_local()?;
1468 let versions = this
1469 .buffer_snapshots
1470 .entry(buffer.remote_id())
1471 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1472 let (version, initial_snapshot) = versions.last().unwrap();
1473 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1474 language_server
1475 .notify::<lsp::notification::DidOpenTextDocument>(
1476 lsp::DidOpenTextDocumentParams {
1477 text_document: lsp::TextDocumentItem::new(
1478 uri,
1479 Default::default(),
1480 *version,
1481 initial_snapshot.text(),
1482 ),
1483 },
1484 )
1485 .log_err()?;
1486 buffer_handle.update(cx, |buffer, cx| {
1487 buffer.set_completion_triggers(
1488 language_server
1489 .capabilities()
1490 .completion_provider
1491 .as_ref()
1492 .and_then(|provider| {
1493 provider.trigger_characters.clone()
1494 })
1495 .unwrap_or(Vec::new()),
1496 cx,
1497 )
1498 });
1499 }
1500 }
1501
1502 cx.notify();
1503 Some(())
1504 });
1505
1506 Some(language_server)
1507 })
1508 });
1509 }
1510
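    /// Shuts down and restarts the language servers associated with the given buffers,
    /// grouped by worktree and language.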
1511 pub fn restart_language_servers_for_buffers(
1512 &mut self,
1513 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1514 cx: &mut ModelContext<Self>,
1515 ) -> Option<()> {
1516 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1517 .into_iter()
1518 .filter_map(|buffer| {
1519 let file = File::from_dyn(buffer.read(cx).file())?;
1520 let worktree = file.worktree.read(cx).as_local()?;
1521 let worktree_id = worktree.id();
1522 let worktree_abs_path = worktree.abs_path().clone();
1523 let full_path = file.full_path(cx);
1524 Some((worktree_id, worktree_abs_path, full_path))
1525 })
1526 .collect();
1527 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1528 let language = self.languages.select_language(&full_path)?;
1529 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1530 }
1531
1532 None
1533 }
1534
1535 fn restart_language_server(
1536 &mut self,
1537 worktree_id: WorktreeId,
1538 worktree_path: Arc<Path>,
1539 language: Arc<Language>,
1540 cx: &mut ModelContext<Self>,
1541 ) {
1542 let key = (worktree_id, language.name());
1543 let server_to_shutdown = self.language_servers.remove(&key);
1544 self.started_language_servers.remove(&key);
        if let Some(server) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1548 cx.spawn_weak(|this, mut cx| async move {
1549 if let Some(this) = this.upgrade(&cx) {
1550 if let Some(server_to_shutdown) = server_to_shutdown {
1551 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1552 shutdown_task.await;
1553 }
1554 }
1555
1556 this.update(&mut cx, |this, cx| {
1557 this.start_language_server(worktree_id, worktree_path, language, cx);
1558 });
1559 }
1560 })
1561 .detach();
1562 }
1563
1564 fn on_lsp_event(
1565 &mut self,
1566 language_server_id: usize,
1567 event: LanguageServerEvent,
1568 language: &Arc<Language>,
1569 cx: &mut ModelContext<Self>,
1570 ) {
1571 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1572 let language_server_status =
1573 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1574 status
1575 } else {
1576 return;
1577 };
1578
1579 match event {
1580 LanguageServerEvent::WorkStart { token } => {
1581 if Some(&token) == disk_diagnostics_token {
1582 language_server_status.pending_diagnostic_updates += 1;
1583 if language_server_status.pending_diagnostic_updates == 1 {
1584 self.disk_based_diagnostics_started(cx);
1585 self.broadcast_language_server_update(
1586 language_server_id,
1587 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1588 proto::LspDiskBasedDiagnosticsUpdating {},
1589 ),
1590 );
1591 }
1592 } else {
1593 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1594 self.broadcast_language_server_update(
1595 language_server_id,
1596 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1597 token,
1598 }),
1599 );
1600 }
1601 }
1602 LanguageServerEvent::WorkProgress { token, progress } => {
1603 if Some(&token) != disk_diagnostics_token {
1604 self.on_lsp_work_progress(
1605 language_server_id,
1606 token.clone(),
1607 progress.clone(),
1608 cx,
1609 );
1610 self.broadcast_language_server_update(
1611 language_server_id,
1612 proto::update_language_server::Variant::WorkProgress(
1613 proto::LspWorkProgress {
1614 token,
1615 message: progress.message,
1616 percentage: progress.percentage.map(|p| p as u32),
1617 },
1618 ),
1619 );
1620 }
1621 }
1622 LanguageServerEvent::WorkEnd { token } => {
1623 if Some(&token) == disk_diagnostics_token {
1624 language_server_status.pending_diagnostic_updates -= 1;
1625 if language_server_status.pending_diagnostic_updates == 0 {
1626 self.disk_based_diagnostics_finished(cx);
1627 self.broadcast_language_server_update(
1628 language_server_id,
1629 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1630 proto::LspDiskBasedDiagnosticsUpdated {},
1631 ),
1632 );
1633 }
1634 } else {
1635 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1636 self.broadcast_language_server_update(
1637 language_server_id,
1638 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1639 token,
1640 }),
1641 );
1642 }
1643 }
1644 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1645 language.process_diagnostics(&mut params);
1646
1647 if disk_diagnostics_token.is_none() {
1648 self.disk_based_diagnostics_started(cx);
1649 self.broadcast_language_server_update(
1650 language_server_id,
1651 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1652 proto::LspDiskBasedDiagnosticsUpdating {},
1653 ),
1654 );
1655 }
1656 self.update_diagnostics(
1657 params,
1658 language
1659 .disk_based_diagnostic_sources()
1660 .unwrap_or(&Default::default()),
1661 cx,
1662 )
1663 .log_err();
1664 if disk_diagnostics_token.is_none() {
1665 self.disk_based_diagnostics_finished(cx);
1666 self.broadcast_language_server_update(
1667 language_server_id,
1668 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1669 proto::LspDiskBasedDiagnosticsUpdated {},
1670 ),
1671 );
1672 }
1673 }
1674 }
1675 }
1676
1677 fn on_lsp_work_start(
1678 &mut self,
1679 language_server_id: usize,
1680 token: String,
1681 cx: &mut ModelContext<Self>,
1682 ) {
1683 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1684 status.pending_work.insert(
1685 token,
1686 LanguageServerProgress {
1687 message: None,
1688 percentage: None,
1689 last_update_at: Instant::now(),
1690 },
1691 );
1692 cx.notify();
1693 }
1694 }
1695
1696 fn on_lsp_work_progress(
1697 &mut self,
1698 language_server_id: usize,
1699 token: String,
1700 progress: LanguageServerProgress,
1701 cx: &mut ModelContext<Self>,
1702 ) {
1703 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1704 status.pending_work.insert(token, progress);
1705 cx.notify();
1706 }
1707 }
1708
1709 fn on_lsp_work_end(
1710 &mut self,
1711 language_server_id: usize,
1712 token: String,
1713 cx: &mut ModelContext<Self>,
1714 ) {
1715 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1716 status.pending_work.remove(&token);
1717 cx.notify();
1718 }
1719 }
1720
1721 fn broadcast_language_server_update(
1722 &self,
1723 language_server_id: usize,
1724 event: proto::update_language_server::Variant,
1725 ) {
1726 if let Some(project_id) = self.remote_id() {
1727 self.client
1728 .send(proto::UpdateLanguageServer {
1729 project_id,
1730 language_server_id: language_server_id as u64,
1731 variant: Some(event),
1732 })
1733 .log_err();
1734 }
1735 }
1736
1737 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1738 for server in self.language_servers.values() {
1739 server
1740 .notify::<lsp::notification::DidChangeConfiguration>(
1741 lsp::DidChangeConfigurationParams {
1742 settings: settings.clone(),
1743 },
1744 )
1745 .ok();
1746 }
1747 *self.language_server_settings.lock() = settings;
1748 }
1749
1750 pub fn language_server_statuses(
1751 &self,
1752 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1753 self.language_server_statuses.values()
1754 }
1755
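    /// Converts an LSP diagnostics notification into grouped `DiagnosticEntry`s: each primary
    /// diagnostic gets a group id, and related-information entries are attached to their
    /// primary's group as supporting diagnostics.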
1756 pub fn update_diagnostics(
1757 &mut self,
1758 params: lsp::PublishDiagnosticsParams,
1759 disk_based_sources: &HashSet<String>,
1760 cx: &mut ModelContext<Self>,
1761 ) -> Result<()> {
1762 let abs_path = params
1763 .uri
1764 .to_file_path()
1765 .map_err(|_| anyhow!("URI is not a file"))?;
1766 let mut next_group_id = 0;
1767 let mut diagnostics = Vec::default();
1768 let mut primary_diagnostic_group_ids = HashMap::default();
1769 let mut sources_by_group_id = HashMap::default();
1770 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1772 let source = diagnostic.source.as_ref();
1773 let code = diagnostic.code.as_ref().map(|code| match code {
1774 lsp::NumberOrString::Number(code) => code.to_string(),
1775 lsp::NumberOrString::String(code) => code.clone(),
1776 });
1777 let range = range_from_lsp(diagnostic.range);
1778 let is_supporting = diagnostic
1779 .related_information
1780 .as_ref()
1781 .map_or(false, |infos| {
1782 infos.iter().any(|info| {
1783 primary_diagnostic_group_ids.contains_key(&(
1784 source,
1785 code.clone(),
1786 range_from_lsp(info.location.range),
1787 ))
1788 })
1789 });
1790
1791 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1792 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1793 });
1794
1795 if is_supporting {
1796 supporting_diagnostics.insert(
1797 (source, code.clone(), range),
1798 (diagnostic.severity, is_unnecessary),
1799 );
1800 } else {
1801 let group_id = post_inc(&mut next_group_id);
1802 let is_disk_based =
1803 source.map_or(false, |source| disk_based_sources.contains(source));
1804
1805 sources_by_group_id.insert(group_id, source);
1806 primary_diagnostic_group_ids
1807 .insert((source, code.clone(), range.clone()), group_id);
1808
1809 diagnostics.push(DiagnosticEntry {
1810 range,
1811 diagnostic: Diagnostic {
1812 code: code.clone(),
1813 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1814 message: diagnostic.message.clone(),
1815 group_id,
1816 is_primary: true,
1817 is_valid: true,
1818 is_disk_based,
1819 is_unnecessary,
1820 },
1821 });
1822 if let Some(infos) = &diagnostic.related_information {
1823 for info in infos {
1824 if info.location.uri == params.uri && !info.message.is_empty() {
1825 let range = range_from_lsp(info.location.range);
1826 diagnostics.push(DiagnosticEntry {
1827 range,
1828 diagnostic: Diagnostic {
1829 code: code.clone(),
1830 severity: DiagnosticSeverity::INFORMATION,
1831 message: info.message.clone(),
1832 group_id,
1833 is_primary: false,
1834 is_valid: true,
1835 is_disk_based,
1836 is_unnecessary: false,
1837 },
1838 });
1839 }
1840 }
1841 }
1842 }
1843 }
1844
1845 for entry in &mut diagnostics {
1846 let diagnostic = &mut entry.diagnostic;
1847 if !diagnostic.is_primary {
1848 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1849 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1850 source,
1851 diagnostic.code.clone(),
1852 entry.range.clone(),
1853 )) {
1854 if let Some(severity) = severity {
1855 diagnostic.severity = severity;
1856 }
1857 diagnostic.is_unnecessary = is_unnecessary;
1858 }
1859 }
1860 }
1861
1862 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1863 Ok(())
1864 }
1865
1866 pub fn update_diagnostic_entries(
1867 &mut self,
1868 abs_path: PathBuf,
1869 version: Option<i32>,
1870 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1871 cx: &mut ModelContext<Project>,
1872 ) -> Result<(), anyhow::Error> {
1873 let (worktree, relative_path) = self
1874 .find_local_worktree(&abs_path, cx)
1875 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1876 if !worktree.read(cx).is_visible() {
1877 return Ok(());
1878 }
1879
1880 let project_path = ProjectPath {
1881 worktree_id: worktree.read(cx).id(),
1882 path: relative_path.into(),
1883 };
1884
1885 for buffer in self.opened_buffers.values() {
1886 if let Some(buffer) = buffer.upgrade(cx) {
1887 if buffer
1888 .read(cx)
1889 .file()
1890 .map_or(false, |file| *file.path() == project_path.path)
1891 {
1892 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1893 break;
1894 }
1895 }
1896 }
1897 worktree.update(cx, |worktree, cx| {
1898 worktree
1899 .as_local_mut()
1900 .ok_or_else(|| anyhow!("not a local worktree"))?
1901 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1902 })?;
1903 cx.emit(Event::DiagnosticsUpdated(project_path));
1904 Ok(())
1905 }
1906
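    /// Applies a set of diagnostics to a single buffer, clipping ranges to the snapshot that
    /// corresponds to the LSP version and remapping disk-based diagnostics across any
    /// unsaved edits.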
1907 fn update_buffer_diagnostics(
1908 &mut self,
1909 buffer: &ModelHandle<Buffer>,
1910 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1911 version: Option<i32>,
1912 cx: &mut ModelContext<Self>,
1913 ) -> Result<()> {
1914 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1915 Ordering::Equal
1916 .then_with(|| b.is_primary.cmp(&a.is_primary))
1917 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1918 .then_with(|| a.severity.cmp(&b.severity))
1919 .then_with(|| a.message.cmp(&b.message))
1920 }
1921
1922 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1923
1924 diagnostics.sort_unstable_by(|a, b| {
1925 Ordering::Equal
1926 .then_with(|| a.range.start.cmp(&b.range.start))
1927 .then_with(|| b.range.end.cmp(&a.range.end))
1928 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1929 });
1930
1931 let mut sanitized_diagnostics = Vec::new();
1932 let edits_since_save = Patch::new(
1933 snapshot
1934 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1935 .collect(),
1936 );
1937 for entry in diagnostics {
1938 let start;
1939 let end;
1940 if entry.diagnostic.is_disk_based {
1941 // Some diagnostics are based on files on disk instead of buffers'
1942 // current contents. Adjust these diagnostics' ranges to reflect
1943 // any unsaved edits.
1944 start = edits_since_save.old_to_new(entry.range.start);
1945 end = edits_since_save.old_to_new(entry.range.end);
1946 } else {
1947 start = entry.range.start;
1948 end = entry.range.end;
1949 }
1950
1951 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1952 ..snapshot.clip_point_utf16(end, Bias::Right);
1953
1954 // Expand empty ranges by one character
1955 if range.start == range.end {
1956 range.end.column += 1;
1957 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1958 if range.start == range.end && range.end.column > 0 {
1959 range.start.column -= 1;
1960 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1961 }
1962 }
1963
1964 sanitized_diagnostics.push(DiagnosticEntry {
1965 range,
1966 diagnostic: entry.diagnostic,
1967 });
1968 }
1969 drop(edits_since_save);
1970
1971 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1972 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1973 Ok(())
1974 }
1975
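    // Format a set of buffers. Remote buffers are formatted by asking the host
    // project over RPC, while local buffers are formatted via their language
    // servers; all resulting edits are gathered into one `ProjectTransaction`.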
1976 pub fn format(
1977 &self,
1978 buffers: HashSet<ModelHandle<Buffer>>,
1979 push_to_history: bool,
1980 cx: &mut ModelContext<Project>,
1981 ) -> Task<Result<ProjectTransaction>> {
1982 let mut local_buffers = Vec::new();
1983 let mut remote_buffers = None;
1984 for buffer_handle in buffers {
1985 let buffer = buffer_handle.read(cx);
1986 if let Some(file) = File::from_dyn(buffer.file()) {
1987 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1988 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1989 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1990 }
1991 } else {
1992 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1993 }
1994 } else {
1995 return Task::ready(Ok(Default::default()));
1996 }
1997 }
1998
1999 let remote_buffers = self.remote_id().zip(remote_buffers);
2000 let client = self.client.clone();
2001
2002 cx.spawn(|this, mut cx| async move {
2003 let mut project_transaction = ProjectTransaction::default();
2004
2005 if let Some((project_id, remote_buffers)) = remote_buffers {
2006 let response = client
2007 .request(proto::FormatBuffers {
2008 project_id,
2009 buffer_ids: remote_buffers
2010 .iter()
2011 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2012 .collect(),
2013 })
2014 .await?
2015 .transaction
2016 .ok_or_else(|| anyhow!("missing transaction"))?;
2017 project_transaction = this
2018 .update(&mut cx, |this, cx| {
2019 this.deserialize_project_transaction(response, push_to_history, cx)
2020 })
2021 .await?;
2022 }
2023
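            // Format local buffers by sending LSP formatting requests directly to each
            // buffer's language server, preferring whole-document formatting and falling
            // back to range formatting over the entire buffer.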
2024 for (buffer, buffer_abs_path, language_server) in local_buffers {
2025 let text_document = lsp::TextDocumentIdentifier::new(
2026 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2027 );
2028 let capabilities = &language_server.capabilities();
2029 let lsp_edits = if capabilities
2030 .document_formatting_provider
2031 .as_ref()
2032 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2033 {
2034 language_server
2035 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2036 text_document,
2037 options: Default::default(),
2038 work_done_progress_params: Default::default(),
2039 })
2040 .await?
2041 } else if capabilities
2042 .document_range_formatting_provider
2043 .as_ref()
2044 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2045 {
2046 let buffer_start = lsp::Position::new(0, 0);
2047 let buffer_end = buffer
2048 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
2049 .to_lsp_position();
2050 language_server
2051 .request::<lsp::request::RangeFormatting>(
2052 lsp::DocumentRangeFormattingParams {
2053 text_document,
2054 range: lsp::Range::new(buffer_start, buffer_end),
2055 options: Default::default(),
2056 work_done_progress_params: Default::default(),
2057 },
2058 )
2059 .await?
2060 } else {
2061 continue;
2062 };
2063
2064 if let Some(lsp_edits) = lsp_edits {
2065 let edits = this
2066 .update(&mut cx, |this, cx| {
2067 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2068 })
2069 .await?;
2070 buffer.update(&mut cx, |buffer, cx| {
2071 buffer.finalize_last_transaction();
2072 buffer.start_transaction();
2073 for (range, text) in edits {
2074 buffer.edit([range], text, cx);
2075 }
2076 if buffer.end_transaction(cx).is_some() {
2077 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2078 if !push_to_history {
2079 buffer.forget_transaction(transaction.id);
2080 }
2081 project_transaction.0.insert(cx.handle(), transaction);
2082 }
2083 });
2084 }
2085 }
2086
2087 Ok(project_transaction)
2088 })
2089 }
2090
2091 pub fn definition<T: ToPointUtf16>(
2092 &self,
2093 buffer: &ModelHandle<Buffer>,
2094 position: T,
2095 cx: &mut ModelContext<Self>,
2096 ) -> Task<Result<Vec<Location>>> {
2097 let position = position.to_point_utf16(buffer.read(cx));
2098 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2099 }
2100
2101 pub fn references<T: ToPointUtf16>(
2102 &self,
2103 buffer: &ModelHandle<Buffer>,
2104 position: T,
2105 cx: &mut ModelContext<Self>,
2106 ) -> Task<Result<Vec<Location>>> {
2107 let position = position.to_point_utf16(buffer.read(cx));
2108 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2109 }
2110
2111 pub fn document_highlights<T: ToPointUtf16>(
2112 &self,
2113 buffer: &ModelHandle<Buffer>,
2114 position: T,
2115 cx: &mut ModelContext<Self>,
2116 ) -> Task<Result<Vec<DocumentHighlight>>> {
2117 let position = position.to_point_utf16(buffer.read(cx));
2118
2119 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2120 }
2121
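    // Search for workspace symbols matching `query`. Locally, this fans out a
    // `workspace/symbol` request to every running language server and resolves
    // each result back to a project path where possible; remotely, it proxies
    // the request to the host project.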
2122 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2123 if self.is_local() {
2124 let mut language_servers = HashMap::default();
2125 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2126 if let Some((worktree, language)) = self
2127 .worktree_for_id(*worktree_id, cx)
2128 .and_then(|worktree| worktree.read(cx).as_local())
2129 .zip(self.languages.get_language(language_name))
2130 {
2131 language_servers
2132 .entry(Arc::as_ptr(language_server))
2133 .or_insert((
2134 language_server.clone(),
2135 *worktree_id,
2136 worktree.abs_path().clone(),
2137 language.clone(),
2138 ));
2139 }
2140 }
2141
2142 let mut requests = Vec::new();
2143 for (language_server, _, _, _) in language_servers.values() {
2144 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2145 lsp::WorkspaceSymbolParams {
2146 query: query.to_string(),
2147 ..Default::default()
2148 },
2149 ));
2150 }
2151
2152 cx.spawn_weak(|this, cx| async move {
2153 let responses = futures::future::try_join_all(requests).await?;
2154
2155 let mut symbols = Vec::new();
2156 if let Some(this) = this.upgrade(&cx) {
2157 this.read_with(&cx, |this, cx| {
2158 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2159 language_servers.into_values().zip(responses)
2160 {
2161 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2162 |lsp_symbol| {
2163 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2164 let mut worktree_id = source_worktree_id;
2165 let path;
2166 if let Some((worktree, rel_path)) =
2167 this.find_local_worktree(&abs_path, cx)
2168 {
2169 worktree_id = worktree.read(cx).id();
2170 path = rel_path;
2171 } else {
2172 path = relativize_path(&worktree_abs_path, &abs_path);
2173 }
2174
2175 let label = language
2176 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2177 .unwrap_or_else(|| {
2178 CodeLabel::plain(lsp_symbol.name.clone(), None)
2179 });
2180 let signature = this.symbol_signature(worktree_id, &path);
2181
2182 Some(Symbol {
2183 source_worktree_id,
2184 worktree_id,
2185 language_name: language.name().to_string(),
2186 name: lsp_symbol.name,
2187 kind: lsp_symbol.kind,
2188 label,
2189 path,
2190 range: range_from_lsp(lsp_symbol.location.range),
2191 signature,
2192 })
2193 },
2194 ));
2195 }
2196 })
2197 }
2198
2199 Ok(symbols)
2200 })
2201 } else if let Some(project_id) = self.remote_id() {
2202 let request = self.client.request(proto::GetProjectSymbols {
2203 project_id,
2204 query: query.to_string(),
2205 });
2206 cx.spawn_weak(|this, cx| async move {
2207 let response = request.await?;
2208 let mut symbols = Vec::new();
2209 if let Some(this) = this.upgrade(&cx) {
2210 this.read_with(&cx, |this, _| {
2211 symbols.extend(
2212 response
2213 .symbols
2214 .into_iter()
2215 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2216 );
2217 })
2218 }
2219 Ok(symbols)
2220 })
2221 } else {
2222 Task::ready(Ok(Default::default()))
2223 }
2224 }
2225
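    // Open the buffer containing the given symbol, either via the language
    // server that produced it (locally) or by asking the host project (remotely).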
2226 pub fn open_buffer_for_symbol(
2227 &mut self,
2228 symbol: &Symbol,
2229 cx: &mut ModelContext<Self>,
2230 ) -> Task<Result<ModelHandle<Buffer>>> {
2231 if self.is_local() {
2232 let language_server = if let Some(server) = self.language_servers.get(&(
2233 symbol.source_worktree_id,
2234 Arc::from(symbol.language_name.as_str()),
2235 )) {
2236 server.clone()
2237 } else {
2238 return Task::ready(Err(anyhow!(
2239 "language server for worktree and language not found"
2240 )));
2241 };
2242
2243 let worktree_abs_path = if let Some(worktree_abs_path) = self
2244 .worktree_for_id(symbol.worktree_id, cx)
2245 .and_then(|worktree| worktree.read(cx).as_local())
2246 .map(|local_worktree| local_worktree.abs_path())
2247 {
2248 worktree_abs_path
2249 } else {
2250 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2251 };
2252 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2253 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2254 uri
2255 } else {
2256 return Task::ready(Err(anyhow!("invalid symbol path")));
2257 };
2258
2259 self.open_local_buffer_via_lsp(
2260 symbol_uri,
2261 Arc::from(symbol.language_name.as_str()),
2262 language_server,
2263 cx,
2264 )
2265 } else if let Some(project_id) = self.remote_id() {
2266 let request = self.client.request(proto::OpenBufferForSymbol {
2267 project_id,
2268 symbol: Some(serialize_symbol(symbol)),
2269 });
2270 cx.spawn(|this, mut cx| async move {
2271 let response = request.await?;
2272 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2273 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2274 .await
2275 })
2276 } else {
2277 Task::ready(Err(anyhow!("project does not have a remote id")))
2278 }
2279 }
2280
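    // Request completions at the given position. Locally this issues an LSP
    // `textDocument/completion` request and converts the results into anchored
    // `Completion`s; remotely it forwards the request to the host project.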
2281 pub fn completions<T: ToPointUtf16>(
2282 &self,
2283 source_buffer_handle: &ModelHandle<Buffer>,
2284 position: T,
2285 cx: &mut ModelContext<Self>,
2286 ) -> Task<Result<Vec<Completion>>> {
2287 let source_buffer_handle = source_buffer_handle.clone();
2288 let source_buffer = source_buffer_handle.read(cx);
2289 let buffer_id = source_buffer.remote_id();
2290 let language = source_buffer.language().cloned();
2291 let worktree;
2292 let buffer_abs_path;
2293 if let Some(file) = File::from_dyn(source_buffer.file()) {
2294 worktree = file.worktree.clone();
2295 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2296 } else {
2297 return Task::ready(Ok(Default::default()));
2298 };
2299
2300 let position = position.to_point_utf16(source_buffer);
2301 let anchor = source_buffer.anchor_after(position);
2302
2303 if worktree.read(cx).as_local().is_some() {
2304 let buffer_abs_path = buffer_abs_path.unwrap();
2305 let lang_server =
2306 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2307 server.clone()
2308 } else {
2309 return Task::ready(Ok(Default::default()));
2310 };
2311
2312 cx.spawn(|_, cx| async move {
2313 let completions = lang_server
2314 .request::<lsp::request::Completion>(lsp::CompletionParams {
2315 text_document_position: lsp::TextDocumentPositionParams::new(
2316 lsp::TextDocumentIdentifier::new(
2317 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2318 ),
2319 position.to_lsp_position(),
2320 ),
2321 context: Default::default(),
2322 work_done_progress_params: Default::default(),
2323 partial_result_params: Default::default(),
2324 })
2325 .await
2326 .context("lsp completion request failed")?;
2327
2328 let completions = if let Some(completions) = completions {
2329 match completions {
2330 lsp::CompletionResponse::Array(completions) => completions,
2331 lsp::CompletionResponse::List(list) => list.items,
2332 }
2333 } else {
2334 Default::default()
2335 };
2336
2337 source_buffer_handle.read_with(&cx, |this, _| {
2338 Ok(completions
2339 .into_iter()
2340 .filter_map(|lsp_completion| {
2341 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2342 lsp::CompletionTextEdit::Edit(edit) => {
2343 (range_from_lsp(edit.range), edit.new_text.clone())
2344 }
2345 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2346 log::info!("unsupported insert/replace completion");
2347 return None;
2348 }
2349 };
2350
2351 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2352 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2353 if clipped_start == old_range.start && clipped_end == old_range.end {
2354 Some(Completion {
2355 old_range: this.anchor_before(old_range.start)
2356 ..this.anchor_after(old_range.end),
2357 new_text,
2358 label: language
2359 .as_ref()
2360 .and_then(|l| l.label_for_completion(&lsp_completion))
2361 .unwrap_or_else(|| {
2362 CodeLabel::plain(
2363 lsp_completion.label.clone(),
2364 lsp_completion.filter_text.as_deref(),
2365 )
2366 }),
2367 lsp_completion,
2368 })
2369 } else {
2370 None
2371 }
2372 })
2373 .collect())
2374 })
2375 })
2376 } else if let Some(project_id) = self.remote_id() {
2377 let rpc = self.client.clone();
2378 let message = proto::GetCompletions {
2379 project_id,
2380 buffer_id,
2381 position: Some(language::proto::serialize_anchor(&anchor)),
2382 version: serialize_version(&source_buffer.version()),
2383 };
2384 cx.spawn_weak(|_, mut cx| async move {
2385 let response = rpc.request(message).await?;
2386
2387 source_buffer_handle
2388 .update(&mut cx, |buffer, _| {
2389 buffer.wait_for_version(deserialize_version(response.version))
2390 })
2391 .await;
2392
2393 response
2394 .completions
2395 .into_iter()
2396 .map(|completion| {
2397 language::proto::deserialize_completion(completion, language.as_ref())
2398 })
2399 .collect()
2400 })
2401 } else {
2402 Task::ready(Ok(Default::default()))
2403 }
2404 }
2405
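    // Resolve a completion item and apply any additional text edits it carries,
    // returning the buffer transaction containing those edits, if any.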
2406 pub fn apply_additional_edits_for_completion(
2407 &self,
2408 buffer_handle: ModelHandle<Buffer>,
2409 completion: Completion,
2410 push_to_history: bool,
2411 cx: &mut ModelContext<Self>,
2412 ) -> Task<Result<Option<Transaction>>> {
2413 let buffer = buffer_handle.read(cx);
2414 let buffer_id = buffer.remote_id();
2415
2416 if self.is_local() {
2417 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2418 server.clone()
2419 } else {
2420 return Task::ready(Ok(Default::default()));
2421 };
2422
2423 cx.spawn(|this, mut cx| async move {
2424 let resolved_completion = lang_server
2425 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2426 .await?;
2427 if let Some(edits) = resolved_completion.additional_text_edits {
2428 let edits = this
2429 .update(&mut cx, |this, cx| {
2430 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2431 })
2432 .await?;
2433 buffer_handle.update(&mut cx, |buffer, cx| {
2434 buffer.finalize_last_transaction();
2435 buffer.start_transaction();
2436 for (range, text) in edits {
2437 buffer.edit([range], text, cx);
2438 }
2439 let transaction = if buffer.end_transaction(cx).is_some() {
2440 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2441 if !push_to_history {
2442 buffer.forget_transaction(transaction.id);
2443 }
2444 Some(transaction)
2445 } else {
2446 None
2447 };
2448 Ok(transaction)
2449 })
2450 } else {
2451 Ok(None)
2452 }
2453 })
2454 } else if let Some(project_id) = self.remote_id() {
2455 let client = self.client.clone();
2456 cx.spawn(|_, mut cx| async move {
2457 let response = client
2458 .request(proto::ApplyCompletionAdditionalEdits {
2459 project_id,
2460 buffer_id,
2461 completion: Some(language::proto::serialize_completion(&completion)),
2462 })
2463 .await?;
2464
2465 if let Some(transaction) = response.transaction {
2466 let transaction = language::proto::deserialize_transaction(transaction)?;
2467 buffer_handle
2468 .update(&mut cx, |buffer, _| {
2469 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2470 })
2471 .await;
2472 if push_to_history {
2473 buffer_handle.update(&mut cx, |buffer, _| {
2474 buffer.push_transaction(transaction.clone(), Instant::now());
2475 });
2476 }
2477 Ok(Some(transaction))
2478 } else {
2479 Ok(None)
2480 }
2481 })
2482 } else {
2483 Task::ready(Err(anyhow!("project does not have a remote id")))
2484 }
2485 }
2486
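    // Fetch code actions for the given range. Locally this sends an LSP
    // `textDocument/codeAction` request (quickfix and refactor kinds only);
    // remotely it asks the host project.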
2487 pub fn code_actions<T: ToOffset>(
2488 &self,
2489 buffer_handle: &ModelHandle<Buffer>,
2490 range: Range<T>,
2491 cx: &mut ModelContext<Self>,
2492 ) -> Task<Result<Vec<CodeAction>>> {
2493 let buffer_handle = buffer_handle.clone();
2494 let buffer = buffer_handle.read(cx);
2495 let buffer_id = buffer.remote_id();
2496 let worktree;
2497 let buffer_abs_path;
2498 if let Some(file) = File::from_dyn(buffer.file()) {
2499 worktree = file.worktree.clone();
2500 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2501 } else {
2502 return Task::ready(Ok(Default::default()));
2503 };
2504 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2505
2506 if worktree.read(cx).as_local().is_some() {
2507 let buffer_abs_path = buffer_abs_path.unwrap();
2508 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2509 server.clone()
2510 } else {
2511 return Task::ready(Ok(Default::default()));
2512 };
2513
2514 let lsp_range = lsp::Range::new(
2515 range.start.to_point_utf16(buffer).to_lsp_position(),
2516 range.end.to_point_utf16(buffer).to_lsp_position(),
2517 );
2518 cx.foreground().spawn(async move {
2519 if !lang_server.capabilities().code_action_provider.is_some() {
2520 return Ok(Default::default());
2521 }
2522
2523 Ok(lang_server
2524 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2525 text_document: lsp::TextDocumentIdentifier::new(
2526 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2527 ),
2528 range: lsp_range,
2529 work_done_progress_params: Default::default(),
2530 partial_result_params: Default::default(),
2531 context: lsp::CodeActionContext {
2532 diagnostics: Default::default(),
2533 only: Some(vec![
2534 lsp::CodeActionKind::QUICKFIX,
2535 lsp::CodeActionKind::REFACTOR,
2536 lsp::CodeActionKind::REFACTOR_EXTRACT,
2537 ]),
2538 },
2539 })
2540 .await?
2541 .unwrap_or_default()
2542 .into_iter()
2543 .filter_map(|entry| {
2544 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2545 Some(CodeAction {
2546 range: range.clone(),
2547 lsp_action,
2548 })
2549 } else {
2550 None
2551 }
2552 })
2553 .collect())
2554 })
2555 } else if let Some(project_id) = self.remote_id() {
2556 let rpc = self.client.clone();
2557 let version = buffer.version();
2558 cx.spawn_weak(|_, mut cx| async move {
2559 let response = rpc
2560 .request(proto::GetCodeActions {
2561 project_id,
2562 buffer_id,
2563 start: Some(language::proto::serialize_anchor(&range.start)),
2564 end: Some(language::proto::serialize_anchor(&range.end)),
2565 version: serialize_version(&version),
2566 })
2567 .await?;
2568
2569 buffer_handle
2570 .update(&mut cx, |buffer, _| {
2571 buffer.wait_for_version(deserialize_version(response.version))
2572 })
2573 .await;
2574
2575 response
2576 .actions
2577 .into_iter()
2578 .map(language::proto::deserialize_code_action)
2579 .collect()
2580 })
2581 } else {
2582 Task::ready(Ok(Default::default()))
2583 }
2584 }
2585
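    // Apply a code action, resolving it with the language server first when
    // needed, and convert any resulting workspace edit into a
    // `ProjectTransaction`. Remote projects delegate to the host over RPC.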
2586 pub fn apply_code_action(
2587 &self,
2588 buffer_handle: ModelHandle<Buffer>,
2589 mut action: CodeAction,
2590 push_to_history: bool,
2591 cx: &mut ModelContext<Self>,
2592 ) -> Task<Result<ProjectTransaction>> {
2593 if self.is_local() {
2594 let buffer = buffer_handle.read(cx);
2595 let lang_name = if let Some(lang) = buffer.language() {
2596 lang.name()
2597 } else {
2598 return Task::ready(Ok(Default::default()));
2599 };
2600 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2601 server.clone()
2602 } else {
2603 return Task::ready(Ok(Default::default()));
2604 };
2605 let range = action.range.to_point_utf16(buffer);
2606
2607 cx.spawn(|this, mut cx| async move {
2608 if let Some(lsp_range) = action
2609 .lsp_action
2610 .data
2611 .as_mut()
2612 .and_then(|d| d.get_mut("codeActionParams"))
2613 .and_then(|d| d.get_mut("range"))
2614 {
2615 *lsp_range = serde_json::to_value(&lsp::Range::new(
2616 range.start.to_lsp_position(),
2617 range.end.to_lsp_position(),
2618 ))
2619 .unwrap();
2620 action.lsp_action = lang_server
2621 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2622 .await?;
2623 } else {
2624 let actions = this
2625 .update(&mut cx, |this, cx| {
2626 this.code_actions(&buffer_handle, action.range, cx)
2627 })
2628 .await?;
2629 action.lsp_action = actions
2630 .into_iter()
2631 .find(|a| a.lsp_action.title == action.lsp_action.title)
2632 .ok_or_else(|| anyhow!("code action is outdated"))?
2633 .lsp_action;
2634 }
2635
2636 if let Some(edit) = action.lsp_action.edit {
2637 Self::deserialize_workspace_edit(
2638 this,
2639 edit,
2640 push_to_history,
2641 lang_name,
2642 lang_server,
2643 &mut cx,
2644 )
2645 .await
2646 } else {
2647 Ok(ProjectTransaction::default())
2648 }
2649 })
2650 } else if let Some(project_id) = self.remote_id() {
2651 let client = self.client.clone();
2652 let request = proto::ApplyCodeAction {
2653 project_id,
2654 buffer_id: buffer_handle.read(cx).remote_id(),
2655 action: Some(language::proto::serialize_code_action(&action)),
2656 };
2657 cx.spawn(|this, mut cx| async move {
2658 let response = client
2659 .request(request)
2660 .await?
2661 .transaction
2662 .ok_or_else(|| anyhow!("missing transaction"))?;
2663 this.update(&mut cx, |this, cx| {
2664 this.deserialize_project_transaction(response, push_to_history, cx)
2665 })
2666 .await
2667 })
2668 } else {
2669 Task::ready(Err(anyhow!("project does not have a remote id")))
2670 }
2671 }
2672
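    // Apply an LSP workspace edit to the project: perform any file create,
    // rename, and delete operations, then apply the text edits to the affected
    // buffers and collect the resulting transactions.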
2673 async fn deserialize_workspace_edit(
2674 this: ModelHandle<Self>,
2675 edit: lsp::WorkspaceEdit,
2676 push_to_history: bool,
2677 language_name: Arc<str>,
2678 language_server: Arc<LanguageServer>,
2679 cx: &mut AsyncAppContext,
2680 ) -> Result<ProjectTransaction> {
2681 let fs = this.read_with(cx, |this, _| this.fs.clone());
2682 let mut operations = Vec::new();
2683 if let Some(document_changes) = edit.document_changes {
2684 match document_changes {
2685 lsp::DocumentChanges::Edits(edits) => {
2686 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2687 }
2688 lsp::DocumentChanges::Operations(ops) => operations = ops,
2689 }
2690 } else if let Some(changes) = edit.changes {
2691 operations.extend(changes.into_iter().map(|(uri, edits)| {
2692 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2693 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2694 uri,
2695 version: None,
2696 },
2697 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2698 })
2699 }));
2700 }
2701
2702 let mut project_transaction = ProjectTransaction::default();
2703 for operation in operations {
2704 match operation {
2705 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2706 let abs_path = op
2707 .uri
2708 .to_file_path()
2709 .map_err(|_| anyhow!("can't convert URI to path"))?;
2710
2711 if let Some(parent_path) = abs_path.parent() {
2712 fs.create_dir(parent_path).await?;
2713 }
2714 if abs_path.ends_with("/") {
2715 fs.create_dir(&abs_path).await?;
2716 } else {
2717 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2718 .await?;
2719 }
2720 }
2721 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2722 let source_abs_path = op
2723 .old_uri
2724 .to_file_path()
2725 .map_err(|_| anyhow!("can't convert URI to path"))?;
2726 let target_abs_path = op
2727 .new_uri
2728 .to_file_path()
2729 .map_err(|_| anyhow!("can't convert URI to path"))?;
2730 fs.rename(
2731 &source_abs_path,
2732 &target_abs_path,
2733 op.options.map(Into::into).unwrap_or_default(),
2734 )
2735 .await?;
2736 }
2737 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2738 let abs_path = op
2739 .uri
2740 .to_file_path()
2741 .map_err(|_| anyhow!("can't convert URI to path"))?;
2742 let options = op.options.map(Into::into).unwrap_or_default();
2743 if abs_path.ends_with("/") {
2744 fs.remove_dir(&abs_path, options).await?;
2745 } else {
2746 fs.remove_file(&abs_path, options).await?;
2747 }
2748 }
2749 lsp::DocumentChangeOperation::Edit(op) => {
2750 let buffer_to_edit = this
2751 .update(cx, |this, cx| {
2752 this.open_local_buffer_via_lsp(
2753 op.text_document.uri,
2754 language_name.clone(),
2755 language_server.clone(),
2756 cx,
2757 )
2758 })
2759 .await?;
2760
2761 let edits = this
2762 .update(cx, |this, cx| {
2763 let edits = op.edits.into_iter().map(|edit| match edit {
2764 lsp::OneOf::Left(edit) => edit,
2765 lsp::OneOf::Right(edit) => edit.text_edit,
2766 });
2767 this.edits_from_lsp(
2768 &buffer_to_edit,
2769 edits,
2770 op.text_document.version,
2771 cx,
2772 )
2773 })
2774 .await?;
2775
2776 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2777 buffer.finalize_last_transaction();
2778 buffer.start_transaction();
2779 for (range, text) in edits {
2780 buffer.edit([range], text, cx);
2781 }
2782 let transaction = if buffer.end_transaction(cx).is_some() {
2783 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2784 if !push_to_history {
2785 buffer.forget_transaction(transaction.id);
2786 }
2787 Some(transaction)
2788 } else {
2789 None
2790 };
2791
2792 transaction
2793 });
2794 if let Some(transaction) = transaction {
2795 project_transaction.0.insert(buffer_to_edit, transaction);
2796 }
2797 }
2798 }
2799 }
2800
2801 Ok(project_transaction)
2802 }
2803
2804 pub fn prepare_rename<T: ToPointUtf16>(
2805 &self,
2806 buffer: ModelHandle<Buffer>,
2807 position: T,
2808 cx: &mut ModelContext<Self>,
2809 ) -> Task<Result<Option<Range<Anchor>>>> {
2810 let position = position.to_point_utf16(buffer.read(cx));
2811 self.request_lsp(buffer, PrepareRename { position }, cx)
2812 }
2813
2814 pub fn perform_rename<T: ToPointUtf16>(
2815 &self,
2816 buffer: ModelHandle<Buffer>,
2817 position: T,
2818 new_name: String,
2819 push_to_history: bool,
2820 cx: &mut ModelContext<Self>,
2821 ) -> Task<Result<ProjectTransaction>> {
2822 let position = position.to_point_utf16(buffer.read(cx));
2823 self.request_lsp(
2824 buffer,
2825 PerformRename {
2826 position,
2827 new_name,
2828 push_to_history,
2829 },
2830 cx,
2831 )
2832 }
2833
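    // Search all visible worktrees for the given query. Locally this runs a
    // multi-stage pipeline: worker tasks scan files on disk for candidate paths,
    // matching files are opened as buffers, and those buffers are then searched
    // in the background. Remote projects forward the query to the host.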
2834 pub fn search(
2835 &self,
2836 query: SearchQuery,
2837 cx: &mut ModelContext<Self>,
2838 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2839 if self.is_local() {
2840 let snapshots = self
2841 .visible_worktrees(cx)
2842 .filter_map(|tree| {
2843 let tree = tree.read(cx).as_local()?;
2844 Some(tree.snapshot())
2845 })
2846 .collect::<Vec<_>>();
2847
2848 let background = cx.background().clone();
2849 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2850 if path_count == 0 {
2851 return Task::ready(Ok(Default::default()));
2852 }
2853 let workers = background.num_cpus().min(path_count);
2854 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
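            // Stage 1: scan candidate files on disk across worker tasks, sending the
            // paths of files that may contain a match down the channel.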
2855 cx.background()
2856 .spawn({
2857 let fs = self.fs.clone();
2858 let background = cx.background().clone();
2859 let query = query.clone();
2860 async move {
2861 let fs = &fs;
2862 let query = &query;
2863 let matching_paths_tx = &matching_paths_tx;
2864 let paths_per_worker = (path_count + workers - 1) / workers;
2865 let snapshots = &snapshots;
2866 background
2867 .scoped(|scope| {
2868 for worker_ix in 0..workers {
2869 let worker_start_ix = worker_ix * paths_per_worker;
2870 let worker_end_ix = worker_start_ix + paths_per_worker;
2871 scope.spawn(async move {
2872 let mut snapshot_start_ix = 0;
2873 let mut abs_path = PathBuf::new();
2874 for snapshot in snapshots {
2875 let snapshot_end_ix =
2876 snapshot_start_ix + snapshot.visible_file_count();
2877 if worker_end_ix <= snapshot_start_ix {
2878 break;
2879 } else if worker_start_ix > snapshot_end_ix {
2880 snapshot_start_ix = snapshot_end_ix;
2881 continue;
2882 } else {
2883 let start_in_snapshot = worker_start_ix
2884 .saturating_sub(snapshot_start_ix);
2885 let end_in_snapshot =
2886 cmp::min(worker_end_ix, snapshot_end_ix)
2887 - snapshot_start_ix;
2888
2889 for entry in snapshot
2890 .files(false, start_in_snapshot)
2891 .take(end_in_snapshot - start_in_snapshot)
2892 {
2893 if matching_paths_tx.is_closed() {
2894 break;
2895 }
2896
2897 abs_path.clear();
2898 abs_path.push(&snapshot.abs_path());
2899 abs_path.push(&entry.path);
2900 let matches = if let Some(file) =
2901 fs.open_sync(&abs_path).await.log_err()
2902 {
2903 query.detect(file).unwrap_or(false)
2904 } else {
2905 false
2906 };
2907
2908 if matches {
2909 let project_path =
2910 (snapshot.id(), entry.path.clone());
2911 if matching_paths_tx
2912 .send(project_path)
2913 .await
2914 .is_err()
2915 {
2916 break;
2917 }
2918 }
2919 }
2920
2921 snapshot_start_ix = snapshot_end_ix;
2922 }
2923 }
2924 });
2925 }
2926 })
2927 .await;
2928 }
2929 })
2930 .detach();
2931
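            // Stage 2: feed already-open buffers into the search, then open a buffer
            // for each matching path that isn't already open and send it along too.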
2932 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2933 let open_buffers = self
2934 .opened_buffers
2935 .values()
2936 .filter_map(|b| b.upgrade(cx))
2937 .collect::<HashSet<_>>();
2938 cx.spawn(|this, cx| async move {
2939 for buffer in &open_buffers {
2940 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2941 buffers_tx.send((buffer.clone(), snapshot)).await?;
2942 }
2943
2944 let open_buffers = Rc::new(RefCell::new(open_buffers));
2945 while let Some(project_path) = matching_paths_rx.next().await {
2946 if buffers_tx.is_closed() {
2947 break;
2948 }
2949
2950 let this = this.clone();
2951 let open_buffers = open_buffers.clone();
2952 let buffers_tx = buffers_tx.clone();
2953 cx.spawn(|mut cx| async move {
2954 if let Some(buffer) = this
2955 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2956 .await
2957 .log_err()
2958 {
2959 if open_buffers.borrow_mut().insert(buffer.clone()) {
2960 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2961 buffers_tx.send((buffer, snapshot)).await?;
2962 }
2963 }
2964
2965 Ok::<_, anyhow::Error>(())
2966 })
2967 .detach();
2968 }
2969
2970 Ok::<_, anyhow::Error>(())
2971 })
2972 .detach_and_log_err(cx);
2973
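            // Stage 3: search each buffer's contents on background threads and collect
            // anchored match ranges per buffer.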
2974 let background = cx.background().clone();
2975 cx.background().spawn(async move {
2976 let query = &query;
2977 let mut matched_buffers = Vec::new();
2978 for _ in 0..workers {
2979 matched_buffers.push(HashMap::default());
2980 }
2981 background
2982 .scoped(|scope| {
2983 for worker_matched_buffers in matched_buffers.iter_mut() {
2984 let mut buffers_rx = buffers_rx.clone();
2985 scope.spawn(async move {
2986 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2987 let buffer_matches = query
2988 .search(snapshot.as_rope())
2989 .await
2990 .iter()
2991 .map(|range| {
2992 snapshot.anchor_before(range.start)
2993 ..snapshot.anchor_after(range.end)
2994 })
2995 .collect::<Vec<_>>();
2996 if !buffer_matches.is_empty() {
2997 worker_matched_buffers
2998 .insert(buffer.clone(), buffer_matches);
2999 }
3000 }
3001 });
3002 }
3003 })
3004 .await;
3005 Ok(matched_buffers.into_iter().flatten().collect())
3006 })
3007 } else if let Some(project_id) = self.remote_id() {
3008 let request = self.client.request(query.to_proto(project_id));
3009 cx.spawn(|this, mut cx| async move {
3010 let response = request.await?;
3011 let mut result = HashMap::default();
3012 for location in response.locations {
3013 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3014 let target_buffer = this
3015 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3016 .await?;
3017 let start = location
3018 .start
3019 .and_then(deserialize_anchor)
3020 .ok_or_else(|| anyhow!("missing target start"))?;
3021 let end = location
3022 .end
3023 .and_then(deserialize_anchor)
3024 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
3029 }
3030 Ok(result)
3031 })
3032 } else {
3033 Task::ready(Ok(Default::default()))
3034 }
3035 }
3036
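    // Dispatch a typed LSP command: send it to the buffer's language server when
    // the project is local, or forward it to the host over RPC when the project
    // is remote, falling back to a default response when neither can handle it.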
3037 fn request_lsp<R: LspCommand>(
3038 &self,
3039 buffer_handle: ModelHandle<Buffer>,
3040 request: R,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Task<Result<R::Response>>
3043 where
3044 <R::LspRequest as lsp::request::Request>::Result: Send,
3045 {
3046 let buffer = buffer_handle.read(cx);
3047 if self.is_local() {
3048 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3049 if let Some((file, language_server)) =
3050 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3051 {
3052 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3053 return cx.spawn(|this, cx| async move {
3054 if !request.check_capabilities(&language_server.capabilities()) {
3055 return Ok(Default::default());
3056 }
3057
3058 let response = language_server
3059 .request::<R::LspRequest>(lsp_params)
3060 .await
3061 .context("lsp request failed")?;
3062 request
3063 .response_from_lsp(response, this, buffer_handle, cx)
3064 .await
3065 });
3066 }
3067 } else if let Some(project_id) = self.remote_id() {
3068 let rpc = self.client.clone();
3069 let message = request.to_proto(project_id, buffer);
3070 return cx.spawn(|this, cx| async move {
3071 let response = rpc.request(message).await?;
3072 request
3073 .response_from_proto(response, this, buffer_handle, cx)
3074 .await
3075 });
3076 }
3077 Task::ready(Ok(Default::default()))
3078 }
3079
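    // Return the worktree containing `abs_path` along with the path relative to
    // that worktree, creating a new local worktree if none contains it yet.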
3080 pub fn find_or_create_local_worktree(
3081 &mut self,
3082 abs_path: impl AsRef<Path>,
3083 visible: bool,
3084 cx: &mut ModelContext<Self>,
3085 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3086 let abs_path = abs_path.as_ref();
3087 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3088 Task::ready(Ok((tree.clone(), relative_path.into())))
3089 } else {
3090 let worktree = self.create_local_worktree(abs_path, visible, cx);
3091 cx.foreground()
3092 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3093 }
3094 }
3095
3096 pub fn find_local_worktree(
3097 &self,
3098 abs_path: &Path,
3099 cx: &AppContext,
3100 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3101 for tree in self.worktrees(cx) {
3102 if let Some(relative_path) = tree
3103 .read(cx)
3104 .as_local()
3105 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3106 {
3107 return Some((tree.clone(), relative_path.into()));
3108 }
3109 }
3110 None
3111 }
3112
3113 pub fn is_shared(&self) -> bool {
3114 match &self.client_state {
3115 ProjectClientState::Local { is_shared, .. } => *is_shared,
3116 ProjectClientState::Remote { .. } => false,
3117 }
3118 }
3119
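    // Create (or reuse an in-flight load of) a local worktree rooted at
    // `abs_path`, then register or share it with the server if the project has
    // a remote id.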
3120 fn create_local_worktree(
3121 &mut self,
3122 abs_path: impl AsRef<Path>,
3123 visible: bool,
3124 cx: &mut ModelContext<Self>,
3125 ) -> Task<Result<ModelHandle<Worktree>>> {
3126 let fs = self.fs.clone();
3127 let client = self.client.clone();
3128 let next_entry_id = self.next_entry_id.clone();
3129 let path: Arc<Path> = abs_path.as_ref().into();
3130 let task = self
3131 .loading_local_worktrees
3132 .entry(path.clone())
3133 .or_insert_with(|| {
3134 cx.spawn(|project, mut cx| {
3135 async move {
3136 let worktree = Worktree::local(
3137 client.clone(),
3138 path.clone(),
3139 visible,
3140 fs,
3141 next_entry_id,
3142 &mut cx,
3143 )
3144 .await;
3145 project.update(&mut cx, |project, _| {
3146 project.loading_local_worktrees.remove(&path);
3147 });
3148 let worktree = worktree?;
3149
3150 let (remote_project_id, is_shared) =
3151 project.update(&mut cx, |project, cx| {
3152 project.add_worktree(&worktree, cx);
3153 (project.remote_id(), project.is_shared())
3154 });
3155
3156 if let Some(project_id) = remote_project_id {
3157 if is_shared {
3158 worktree
3159 .update(&mut cx, |worktree, cx| {
3160 worktree.as_local_mut().unwrap().share(project_id, cx)
3161 })
3162 .await?;
3163 } else {
3164 worktree
3165 .update(&mut cx, |worktree, cx| {
3166 worktree.as_local_mut().unwrap().register(project_id, cx)
3167 })
3168 .await?;
3169 }
3170 }
3171
3172 Ok(worktree)
3173 }
                    .map_err(Arc::new)
3175 })
3176 .shared()
3177 })
3178 .clone();
3179 cx.foreground().spawn(async move {
3180 match task.await {
3181 Ok(worktree) => Ok(worktree),
3182 Err(err) => Err(anyhow!("{}", err)),
3183 }
3184 })
3185 }
3186
3187 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3188 self.worktrees.retain(|worktree| {
3189 worktree
3190 .upgrade(cx)
3191 .map_or(false, |w| w.read(cx).id() != id)
3192 });
3193 cx.notify();
3194 }
3195
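    // Track a new worktree, holding a strong handle when it must stay alive
    // (shared projects, visible worktrees, or remote worktrees) and a weak
    // handle otherwise so it can be dropped once no longer used.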
3196 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3197 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3198 if worktree.read(cx).is_local() {
3199 cx.subscribe(&worktree, |this, worktree, _, cx| {
3200 this.update_local_worktree_buffers(worktree, cx);
3201 })
3202 .detach();
3203 }
3204
3205 let push_strong_handle = {
3206 let worktree = worktree.read(cx);
3207 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3208 };
3209 if push_strong_handle {
3210 self.worktrees
3211 .push(WorktreeHandle::Strong(worktree.clone()));
3212 } else {
3213 cx.observe_release(&worktree, |this, _, cx| {
3214 this.worktrees
3215 .retain(|worktree| worktree.upgrade(cx).is_some());
3216 cx.notify();
3217 })
3218 .detach();
3219 self.worktrees
3220 .push(WorktreeHandle::Weak(worktree.downgrade()));
3221 }
3222 cx.notify();
3223 }
3224
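    // After a local worktree changes, refresh the `File` associated with each
    // open buffer in that worktree and notify collaborators of the new file
    // metadata. Buffers whose handles have been dropped are forgotten.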
3225 fn update_local_worktree_buffers(
3226 &mut self,
3227 worktree_handle: ModelHandle<Worktree>,
3228 cx: &mut ModelContext<Self>,
3229 ) {
3230 let snapshot = worktree_handle.read(cx).snapshot();
3231 let mut buffers_to_delete = Vec::new();
3232 for (buffer_id, buffer) in &self.opened_buffers {
3233 if let Some(buffer) = buffer.upgrade(cx) {
3234 buffer.update(cx, |buffer, cx| {
3235 if let Some(old_file) = File::from_dyn(buffer.file()) {
3236 if old_file.worktree != worktree_handle {
3237 return;
3238 }
3239
3240 let new_file = if let Some(entry) = old_file
3241 .entry_id
3242 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3243 {
3244 File {
3245 is_local: true,
3246 entry_id: Some(entry.id),
3247 mtime: entry.mtime,
3248 path: entry.path.clone(),
3249 worktree: worktree_handle.clone(),
3250 }
3251 } else if let Some(entry) =
3252 snapshot.entry_for_path(old_file.path().as_ref())
3253 {
3254 File {
3255 is_local: true,
3256 entry_id: Some(entry.id),
3257 mtime: entry.mtime,
3258 path: entry.path.clone(),
3259 worktree: worktree_handle.clone(),
3260 }
3261 } else {
3262 File {
3263 is_local: true,
3264 entry_id: None,
3265 path: old_file.path().clone(),
3266 mtime: old_file.mtime(),
3267 worktree: worktree_handle.clone(),
3268 }
3269 };
3270
3271 if let Some(project_id) = self.remote_id() {
3272 self.client
3273 .send(proto::UpdateBufferFile {
3274 project_id,
3275 buffer_id: *buffer_id as u64,
3276 file: Some(new_file.to_proto()),
3277 })
3278 .log_err();
3279 }
3280 buffer.file_updated(Box::new(new_file), cx).detach();
3281 }
3282 });
3283 } else {
3284 buffers_to_delete.push(*buffer_id);
3285 }
3286 }
3287
3288 for buffer_id in buffers_to_delete {
3289 self.opened_buffers.remove(&buffer_id);
3290 }
3291 }
3292
3293 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3294 let new_active_entry = entry.and_then(|project_path| {
3295 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3296 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3297 Some(entry.id)
3298 });
3299 if new_active_entry != self.active_entry {
3300 self.active_entry = new_active_entry;
3301 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3302 }
3303 }
3304
3305 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3306 self.language_servers_with_diagnostics_running > 0
3307 }
3308
3309 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3310 let mut summary = DiagnosticSummary::default();
3311 for (_, path_summary) in self.diagnostic_summaries(cx) {
3312 summary.error_count += path_summary.error_count;
3313 summary.warning_count += path_summary.warning_count;
3314 summary.info_count += path_summary.info_count;
3315 summary.hint_count += path_summary.hint_count;
3316 }
3317 summary
3318 }
3319
3320 pub fn diagnostic_summaries<'a>(
3321 &'a self,
3322 cx: &'a AppContext,
3323 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3324 self.worktrees(cx).flat_map(move |worktree| {
3325 let worktree = worktree.read(cx);
3326 let worktree_id = worktree.id();
3327 worktree
3328 .diagnostic_summaries()
3329 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3330 })
3331 }
3332
3333 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3334 self.language_servers_with_diagnostics_running += 1;
3335 if self.language_servers_with_diagnostics_running == 1 {
3336 cx.emit(Event::DiskBasedDiagnosticsStarted);
3337 }
3338 }
3339
3340 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3341 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3342 self.language_servers_with_diagnostics_running -= 1;
3343 if self.language_servers_with_diagnostics_running == 0 {
3344 cx.emit(Event::DiskBasedDiagnosticsFinished);
3345 }
3346 }
3347
3348 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3349 self.active_entry
3350 }
3351
3352 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3353 self.worktree_for_id(path.worktree_id, cx)?
3354 .read(cx)
3355 .entry_for_path(&path.path)
3356 .map(|entry| entry.id)
3357 }
3358
3359 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3360 let worktree = self.worktree_for_entry(entry_id, cx)?;
3361 let worktree = worktree.read(cx);
3362 let worktree_id = worktree.id();
3363 let path = worktree.entry_for_id(entry_id)?.path.clone();
3364 Some(ProjectPath { worktree_id, path })
3365 }
3366
3367 // RPC message handlers
3368
3369 async fn handle_unshare_project(
3370 this: ModelHandle<Self>,
3371 _: TypedEnvelope<proto::UnshareProject>,
3372 _: Arc<Client>,
3373 mut cx: AsyncAppContext,
3374 ) -> Result<()> {
3375 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3376 Ok(())
3377 }
3378
3379 async fn handle_add_collaborator(
3380 this: ModelHandle<Self>,
3381 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3382 _: Arc<Client>,
3383 mut cx: AsyncAppContext,
3384 ) -> Result<()> {
3385 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3386 let collaborator = envelope
3387 .payload
3388 .collaborator
3389 .take()
3390 .ok_or_else(|| anyhow!("empty collaborator"))?;
3391
3392 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3393 this.update(&mut cx, |this, cx| {
3394 this.collaborators
3395 .insert(collaborator.peer_id, collaborator);
3396 cx.notify();
3397 });
3398
3399 Ok(())
3400 }
3401
3402 async fn handle_remove_collaborator(
3403 this: ModelHandle<Self>,
3404 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3405 _: Arc<Client>,
3406 mut cx: AsyncAppContext,
3407 ) -> Result<()> {
3408 this.update(&mut cx, |this, cx| {
3409 let peer_id = PeerId(envelope.payload.peer_id);
3410 let replica_id = this
3411 .collaborators
3412 .remove(&peer_id)
3413 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3414 .replica_id;
3415 for (_, buffer) in &this.opened_buffers {
3416 if let Some(buffer) = buffer.upgrade(cx) {
3417 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3418 }
3419 }
3420 cx.emit(Event::CollaboratorLeft(peer_id));
3421 cx.notify();
3422 Ok(())
3423 })
3424 }
3425
3426 async fn handle_register_worktree(
3427 this: ModelHandle<Self>,
3428 envelope: TypedEnvelope<proto::RegisterWorktree>,
3429 client: Arc<Client>,
3430 mut cx: AsyncAppContext,
3431 ) -> Result<()> {
3432 this.update(&mut cx, |this, cx| {
3433 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3434 let replica_id = this.replica_id();
3435 let worktree = proto::Worktree {
3436 id: envelope.payload.worktree_id,
3437 root_name: envelope.payload.root_name,
3438 entries: Default::default(),
3439 diagnostic_summaries: Default::default(),
3440 visible: envelope.payload.visible,
3441 };
3442 let (worktree, load_task) =
3443 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3444 this.add_worktree(&worktree, cx);
3445 load_task.detach();
3446 Ok(())
3447 })
3448 }
3449
3450 async fn handle_unregister_worktree(
3451 this: ModelHandle<Self>,
3452 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3453 _: Arc<Client>,
3454 mut cx: AsyncAppContext,
3455 ) -> Result<()> {
3456 this.update(&mut cx, |this, cx| {
3457 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3458 this.remove_worktree(worktree_id, cx);
3459 Ok(())
3460 })
3461 }
3462
3463 async fn handle_update_worktree(
3464 this: ModelHandle<Self>,
3465 envelope: TypedEnvelope<proto::UpdateWorktree>,
3466 _: Arc<Client>,
3467 mut cx: AsyncAppContext,
3468 ) -> Result<()> {
3469 this.update(&mut cx, |this, cx| {
3470 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3471 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3472 worktree.update(cx, |worktree, _| {
3473 let worktree = worktree.as_remote_mut().unwrap();
3474 worktree.update_from_remote(envelope)
3475 })?;
3476 }
3477 Ok(())
3478 })
3479 }
3480
3481 async fn handle_update_diagnostic_summary(
3482 this: ModelHandle<Self>,
3483 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3484 _: Arc<Client>,
3485 mut cx: AsyncAppContext,
3486 ) -> Result<()> {
3487 this.update(&mut cx, |this, cx| {
3488 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3489 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3490 if let Some(summary) = envelope.payload.summary {
3491 let project_path = ProjectPath {
3492 worktree_id,
3493 path: Path::new(&summary.path).into(),
3494 };
3495 worktree.update(cx, |worktree, _| {
3496 worktree
3497 .as_remote_mut()
3498 .unwrap()
3499 .update_diagnostic_summary(project_path.path.clone(), &summary);
3500 });
3501 cx.emit(Event::DiagnosticsUpdated(project_path));
3502 }
3503 }
3504 Ok(())
3505 })
3506 }
3507
3508 async fn handle_start_language_server(
3509 this: ModelHandle<Self>,
3510 envelope: TypedEnvelope<proto::StartLanguageServer>,
3511 _: Arc<Client>,
3512 mut cx: AsyncAppContext,
3513 ) -> Result<()> {
3514 let server = envelope
3515 .payload
3516 .server
3517 .ok_or_else(|| anyhow!("invalid server"))?;
3518 this.update(&mut cx, |this, cx| {
3519 this.language_server_statuses.insert(
3520 server.id as usize,
3521 LanguageServerStatus {
3522 name: server.name,
3523 pending_work: Default::default(),
3524 pending_diagnostic_updates: 0,
3525 },
3526 );
3527 cx.notify();
3528 });
3529 Ok(())
3530 }
3531
3532 async fn handle_update_language_server(
3533 this: ModelHandle<Self>,
3534 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3535 _: Arc<Client>,
3536 mut cx: AsyncAppContext,
3537 ) -> Result<()> {
3538 let language_server_id = envelope.payload.language_server_id as usize;
3539 match envelope
3540 .payload
3541 .variant
3542 .ok_or_else(|| anyhow!("invalid variant"))?
3543 {
3544 proto::update_language_server::Variant::WorkStart(payload) => {
3545 this.update(&mut cx, |this, cx| {
3546 this.on_lsp_work_start(language_server_id, payload.token, cx);
3547 })
3548 }
3549 proto::update_language_server::Variant::WorkProgress(payload) => {
3550 this.update(&mut cx, |this, cx| {
3551 this.on_lsp_work_progress(
3552 language_server_id,
3553 payload.token,
3554 LanguageServerProgress {
3555 message: payload.message,
3556 percentage: payload.percentage.map(|p| p as usize),
3557 last_update_at: Instant::now(),
3558 },
3559 cx,
3560 );
3561 })
3562 }
3563 proto::update_language_server::Variant::WorkEnd(payload) => {
3564 this.update(&mut cx, |this, cx| {
3565 this.on_lsp_work_end(language_server_id, payload.token, cx);
3566 })
3567 }
3568 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3569 this.update(&mut cx, |this, cx| {
3570 this.disk_based_diagnostics_started(cx);
3571 })
3572 }
3573 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3574 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3575 }
3576 }
3577
3578 Ok(())
3579 }
3580
3581 async fn handle_update_buffer(
3582 this: ModelHandle<Self>,
3583 envelope: TypedEnvelope<proto::UpdateBuffer>,
3584 _: Arc<Client>,
3585 mut cx: AsyncAppContext,
3586 ) -> Result<()> {
3587 this.update(&mut cx, |this, cx| {
3588 let payload = envelope.payload.clone();
3589 let buffer_id = payload.buffer_id;
3590 let ops = payload
3591 .operations
3592 .into_iter()
                .map(language::proto::deserialize_operation)
3594 .collect::<Result<Vec<_>, _>>()?;
3595 match this.opened_buffers.entry(buffer_id) {
3596 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3597 OpenBuffer::Strong(buffer) => {
3598 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3599 }
3600 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3601 OpenBuffer::Weak(_) => {}
3602 },
3603 hash_map::Entry::Vacant(e) => {
3604 e.insert(OpenBuffer::Loading(ops));
3605 }
3606 }
3607 Ok(())
3608 })
3609 }
3610
3611 async fn handle_update_buffer_file(
3612 this: ModelHandle<Self>,
3613 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3614 _: Arc<Client>,
3615 mut cx: AsyncAppContext,
3616 ) -> Result<()> {
3617 this.update(&mut cx, |this, cx| {
3618 let payload = envelope.payload.clone();
3619 let buffer_id = payload.buffer_id;
3620 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3621 let worktree = this
3622 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3623 .ok_or_else(|| anyhow!("no such worktree"))?;
3624 let file = File::from_proto(file, worktree.clone(), cx)?;
3625 let buffer = this
3626 .opened_buffers
3627 .get_mut(&buffer_id)
3628 .and_then(|b| b.upgrade(cx))
3629 .ok_or_else(|| anyhow!("no such buffer"))?;
3630 buffer.update(cx, |buffer, cx| {
3631 buffer.file_updated(Box::new(file), cx).detach();
3632 });
3633 Ok(())
3634 })
3635 }
3636
3637 async fn handle_save_buffer(
3638 this: ModelHandle<Self>,
3639 envelope: TypedEnvelope<proto::SaveBuffer>,
3640 _: Arc<Client>,
3641 mut cx: AsyncAppContext,
3642 ) -> Result<proto::BufferSaved> {
3643 let buffer_id = envelope.payload.buffer_id;
3644 let requested_version = deserialize_version(envelope.payload.version);
3645
3646 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3647 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3648 let buffer = this
3649 .opened_buffers
3650 .get(&buffer_id)
3651 .map(|buffer| buffer.upgrade(cx).unwrap())
3652 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3653 Ok::<_, anyhow::Error>((project_id, buffer))
3654 })?;
3655 buffer
3656 .update(&mut cx, |buffer, _| {
3657 buffer.wait_for_version(requested_version)
3658 })
3659 .await;
3660
3661 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3662 Ok(proto::BufferSaved {
3663 project_id,
3664 buffer_id,
3665 version: serialize_version(&saved_version),
3666 mtime: Some(mtime.into()),
3667 })
3668 }
3669
3670 async fn handle_format_buffers(
3671 this: ModelHandle<Self>,
3672 envelope: TypedEnvelope<proto::FormatBuffers>,
3673 _: Arc<Client>,
3674 mut cx: AsyncAppContext,
3675 ) -> Result<proto::FormatBuffersResponse> {
3676 let sender_id = envelope.original_sender_id()?;
3677 let format = this.update(&mut cx, |this, cx| {
3678 let mut buffers = HashSet::default();
3679 for buffer_id in &envelope.payload.buffer_ids {
3680 buffers.insert(
3681 this.opened_buffers
3682 .get(buffer_id)
3683 .map(|buffer| buffer.upgrade(cx).unwrap())
3684 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3685 );
3686 }
3687 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3688 })?;
3689
3690 let project_transaction = format.await?;
3691 let project_transaction = this.update(&mut cx, |this, cx| {
3692 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3693 });
3694 Ok(proto::FormatBuffersResponse {
3695 transaction: Some(project_transaction),
3696 })
3697 }
3698
3699 async fn handle_get_completions(
3700 this: ModelHandle<Self>,
3701 envelope: TypedEnvelope<proto::GetCompletions>,
3702 _: Arc<Client>,
3703 mut cx: AsyncAppContext,
3704 ) -> Result<proto::GetCompletionsResponse> {
3705 let position = envelope
3706 .payload
3707 .position
3708 .and_then(language::proto::deserialize_anchor)
3709 .ok_or_else(|| anyhow!("invalid position"))?;
3710 let version = deserialize_version(envelope.payload.version);
3711 let buffer = this.read_with(&cx, |this, cx| {
3712 this.opened_buffers
3713 .get(&envelope.payload.buffer_id)
3714 .map(|buffer| buffer.upgrade(cx).unwrap())
3715 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3716 })?;
3717 buffer
3718 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3719 .await;
3720 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3721 let completions = this
3722 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3723 .await?;
3724
3725 Ok(proto::GetCompletionsResponse {
3726 completions: completions
3727 .iter()
3728 .map(language::proto::serialize_completion)
3729 .collect(),
3730 version: serialize_version(&version),
3731 })
3732 }
3733
3734 async fn handle_apply_additional_edits_for_completion(
3735 this: ModelHandle<Self>,
3736 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3737 _: Arc<Client>,
3738 mut cx: AsyncAppContext,
3739 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3740 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3741 let buffer = this
3742 .opened_buffers
3743 .get(&envelope.payload.buffer_id)
3744 .map(|buffer| buffer.upgrade(cx).unwrap())
3745 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3746 let language = buffer.read(cx).language();
3747 let completion = language::proto::deserialize_completion(
3748 envelope
3749 .payload
3750 .completion
3751 .ok_or_else(|| anyhow!("invalid completion"))?,
3752 language,
3753 )?;
3754 Ok::<_, anyhow::Error>(
3755 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3756 )
3757 })?;
3758
3759 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3760 transaction: apply_additional_edits
3761 .await?
3762 .as_ref()
3763 .map(language::proto::serialize_transaction),
3764 })
3765 }
3766
3767 async fn handle_get_code_actions(
3768 this: ModelHandle<Self>,
3769 envelope: TypedEnvelope<proto::GetCodeActions>,
3770 _: Arc<Client>,
3771 mut cx: AsyncAppContext,
3772 ) -> Result<proto::GetCodeActionsResponse> {
3773 let start = envelope
3774 .payload
3775 .start
3776 .and_then(language::proto::deserialize_anchor)
3777 .ok_or_else(|| anyhow!("invalid start"))?;
3778 let end = envelope
3779 .payload
3780 .end
3781 .and_then(language::proto::deserialize_anchor)
3782 .ok_or_else(|| anyhow!("invalid end"))?;
3783 let buffer = this.update(&mut cx, |this, cx| {
3784 this.opened_buffers
3785 .get(&envelope.payload.buffer_id)
3786 .map(|buffer| buffer.upgrade(cx).unwrap())
3787 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3788 })?;
3789 buffer
3790 .update(&mut cx, |buffer, _| {
3791 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3792 })
3793 .await;
3794
3795 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3796 let code_actions = this.update(&mut cx, |this, cx| {
3797 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3798 })?;
3799
3800 Ok(proto::GetCodeActionsResponse {
3801 actions: code_actions
3802 .await?
3803 .iter()
3804 .map(language::proto::serialize_code_action)
3805 .collect(),
3806 version: serialize_version(&version),
3807 })
3808 }
3809
3810 async fn handle_apply_code_action(
3811 this: ModelHandle<Self>,
3812 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3813 _: Arc<Client>,
3814 mut cx: AsyncAppContext,
3815 ) -> Result<proto::ApplyCodeActionResponse> {
3816 let sender_id = envelope.original_sender_id()?;
3817 let action = language::proto::deserialize_code_action(
3818 envelope
3819 .payload
3820 .action
3821 .ok_or_else(|| anyhow!("invalid action"))?,
3822 )?;
3823 let apply_code_action = this.update(&mut cx, |this, cx| {
3824 let buffer = this
3825 .opened_buffers
3826 .get(&envelope.payload.buffer_id)
3827 .map(|buffer| buffer.upgrade(cx).unwrap())
3828 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3829 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3830 })?;
3831
3832 let project_transaction = apply_code_action.await?;
3833 let project_transaction = this.update(&mut cx, |this, cx| {
3834 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3835 });
3836 Ok(proto::ApplyCodeActionResponse {
3837 transaction: Some(project_transaction),
3838 })
3839 }
3840
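/// Generic handler for proto requests that wrap an LSP request. Resolves the
/// target buffer, reconstructs the typed request via `T::from_proto`, forwards
/// it to the matching language server through `request_lsp`, and converts the
/// response back to proto along with the buffer version it was computed against.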
3841 async fn handle_lsp_command<T: LspCommand>(
3842 this: ModelHandle<Self>,
3843 envelope: TypedEnvelope<T::ProtoRequest>,
3844 _: Arc<Client>,
3845 mut cx: AsyncAppContext,
3846 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3847 where
3848 <T::LspRequest as lsp::request::Request>::Result: Send,
3849 {
3850 let sender_id = envelope.original_sender_id()?;
3851 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3852 let buffer_handle = this.read_with(&cx, |this, _| {
3853 this.opened_buffers
3854 .get(&buffer_id)
3855 .and_then(|buffer| buffer.upgrade(&cx))
3856 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3857 })?;
3858 let request = T::from_proto(
3859 envelope.payload,
3860 this.clone(),
3861 buffer_handle.clone(),
3862 cx.clone(),
3863 )
3864 .await?;
3865 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3866 let response = this
3867 .update(&mut cx, |this, cx| {
3868 this.request_lsp(buffer_handle, request, cx)
3869 })
3870 .await?;
3871 this.update(&mut cx, |this, cx| {
3872 Ok(T::response_to_proto(
3873 response,
3874 this,
3875 sender_id,
3876 &buffer_version,
3877 cx,
3878 ))
3879 })
3880 }
3881
3882 async fn handle_get_project_symbols(
3883 this: ModelHandle<Self>,
3884 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3885 _: Arc<Client>,
3886 mut cx: AsyncAppContext,
3887 ) -> Result<proto::GetProjectSymbolsResponse> {
3888 let symbols = this
3889 .update(&mut cx, |this, cx| {
3890 this.symbols(&envelope.payload.query, cx)
3891 })
3892 .await?;
3893
3894 Ok(proto::GetProjectSymbolsResponse {
3895 symbols: symbols.iter().map(serialize_symbol).collect(),
3896 })
3897 }
3898
3899 async fn handle_search_project(
3900 this: ModelHandle<Self>,
3901 envelope: TypedEnvelope<proto::SearchProject>,
3902 _: Arc<Client>,
3903 mut cx: AsyncAppContext,
3904 ) -> Result<proto::SearchProjectResponse> {
3905 let peer_id = envelope.original_sender_id()?;
3906 let query = SearchQuery::from_proto(envelope.payload)?;
3907 let result = this
3908 .update(&mut cx, |this, cx| this.search(query, cx))
3909 .await?;
3910
3911 this.update(&mut cx, |this, cx| {
3912 let mut locations = Vec::new();
3913 for (buffer, ranges) in result {
3914 for range in ranges {
3915 let start = serialize_anchor(&range.start);
3916 let end = serialize_anchor(&range.end);
3917 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3918 locations.push(proto::Location {
3919 buffer: Some(buffer),
3920 start: Some(start),
3921 end: Some(end),
3922 });
3923 }
3924 }
3925 Ok(proto::SearchProjectResponse { locations })
3926 })
3927 }
3928
3929 async fn handle_open_buffer_for_symbol(
3930 this: ModelHandle<Self>,
3931 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3932 _: Arc<Client>,
3933 mut cx: AsyncAppContext,
3934 ) -> Result<proto::OpenBufferForSymbolResponse> {
3935 let peer_id = envelope.original_sender_id()?;
3936 let symbol = envelope
3937 .payload
3938 .symbol
3939 .ok_or_else(|| anyhow!("invalid symbol"))?;
3940 let symbol = this.read_with(&cx, |this, _| {
3941 let symbol = this.deserialize_symbol(symbol)?;
3942 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3943 if signature == symbol.signature {
3944 Ok(symbol)
3945 } else {
3946 Err(anyhow!("invalid symbol signature"))
3947 }
3948 })?;
3949 let buffer = this
3950 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3951 .await?;
3952
3953 Ok(proto::OpenBufferForSymbolResponse {
3954 buffer: Some(this.update(&mut cx, |this, cx| {
3955 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3956 })),
3957 })
3958 }
3959
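/// Computes a SHA-256 digest over a worktree id, a path, and this project's
/// private nonce. Used above to verify that a symbol sent back by a peer was
/// originally produced by this project.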
3960 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3961 let mut hasher = Sha256::new();
3962 hasher.update(worktree_id.to_proto().to_be_bytes());
3963 hasher.update(path.to_string_lossy().as_bytes());
3964 hasher.update(self.nonce.to_be_bytes());
3965 hasher.finalize().as_slice().try_into().unwrap()
3966 }
3967
3968 async fn handle_open_buffer_by_id(
3969 this: ModelHandle<Self>,
3970 envelope: TypedEnvelope<proto::OpenBufferById>,
3971 _: Arc<Client>,
3972 mut cx: AsyncAppContext,
3973 ) -> Result<proto::OpenBufferResponse> {
3974 let peer_id = envelope.original_sender_id()?;
3975 let buffer = this
3976 .update(&mut cx, |this, cx| {
3977 this.open_buffer_by_id(envelope.payload.id, cx)
3978 })
3979 .await?;
3980 this.update(&mut cx, |this, cx| {
3981 Ok(proto::OpenBufferResponse {
3982 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3983 })
3984 })
3985 }
3986
3987 async fn handle_open_buffer_by_path(
3988 this: ModelHandle<Self>,
3989 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3990 _: Arc<Client>,
3991 mut cx: AsyncAppContext,
3992 ) -> Result<proto::OpenBufferResponse> {
3993 let peer_id = envelope.original_sender_id()?;
3994 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3995 let open_buffer = this.update(&mut cx, |this, cx| {
3996 this.open_buffer(
3997 ProjectPath {
3998 worktree_id,
3999 path: PathBuf::from(envelope.payload.path).into(),
4000 },
4001 cx,
4002 )
4003 });
4004
4005 let buffer = open_buffer.await?;
4006 this.update(&mut cx, |this, cx| {
4007 Ok(proto::OpenBufferResponse {
4008 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4009 })
4010 })
4011 }
4012
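/// Converts a `ProjectTransaction` into its proto representation, serializing
/// each affected buffer for the given peer via `serialize_buffer_for_peer`.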
4013 fn serialize_project_transaction_for_peer(
4014 &mut self,
4015 project_transaction: ProjectTransaction,
4016 peer_id: PeerId,
4017 cx: &AppContext,
4018 ) -> proto::ProjectTransaction {
4019 let mut serialized_transaction = proto::ProjectTransaction {
4020 buffers: Default::default(),
4021 transactions: Default::default(),
4022 };
4023 for (buffer, transaction) in project_transaction.0 {
4024 serialized_transaction
4025 .buffers
4026 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4027 serialized_transaction
4028 .transactions
4029 .push(language::proto::serialize_transaction(&transaction));
4030 }
4031 serialized_transaction
4032 }
4033
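/// Reconstructs a `ProjectTransaction` from its proto representation: resolves
/// each buffer, waits for the edits referenced by each transaction to arrive,
/// and optionally pushes the transactions onto the buffers' undo histories.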
4034 fn deserialize_project_transaction(
4035 &mut self,
4036 message: proto::ProjectTransaction,
4037 push_to_history: bool,
4038 cx: &mut ModelContext<Self>,
4039 ) -> Task<Result<ProjectTransaction>> {
4040 cx.spawn(|this, mut cx| async move {
4041 let mut project_transaction = ProjectTransaction::default();
4042 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4043 let buffer = this
4044 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4045 .await?;
4046 let transaction = language::proto::deserialize_transaction(transaction)?;
4047 project_transaction.0.insert(buffer, transaction);
4048 }
4049
4050 for (buffer, transaction) in &project_transaction.0 {
4051 buffer
4052 .update(&mut cx, |buffer, _| {
4053 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4054 })
4055 .await;
4056
4057 if push_to_history {
4058 buffer.update(&mut cx, |buffer, _| {
4059 buffer.push_transaction(transaction.clone(), Instant::now());
4060 });
4061 }
4062 }
4063
4064 Ok(project_transaction)
4065 })
4066 }
4067
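/// Serializes a buffer for transmission to a peer. The first time a buffer is
/// shared with a given peer its full state is sent; afterwards only its id.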
4068 fn serialize_buffer_for_peer(
4069 &mut self,
4070 buffer: &ModelHandle<Buffer>,
4071 peer_id: PeerId,
4072 cx: &AppContext,
4073 ) -> proto::Buffer {
4074 let buffer_id = buffer.read(cx).remote_id();
4075 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4076 if shared_buffers.insert(buffer_id) {
4077 proto::Buffer {
4078 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4079 }
4080 } else {
4081 proto::Buffer {
4082 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4083 }
4084 }
4085 }
4086
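/// Resolves a proto buffer into a local buffer handle. An `Id` variant waits
/// until the referenced buffer has been opened locally, while a `State` variant
/// constructs a new buffer (and its file, if any) and registers it with the
/// project.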
4087 fn deserialize_buffer(
4088 &mut self,
4089 buffer: proto::Buffer,
4090 cx: &mut ModelContext<Self>,
4091 ) -> Task<Result<ModelHandle<Buffer>>> {
4092 let replica_id = self.replica_id();
4093
4094 let opened_buffer_tx = self.opened_buffer.0.clone();
4095 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4096 cx.spawn(|this, mut cx| async move {
4097 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4098 proto::buffer::Variant::Id(id) => {
4099 let buffer = loop {
4100 let buffer = this.read_with(&cx, |this, cx| {
4101 this.opened_buffers
4102 .get(&id)
4103 .and_then(|buffer| buffer.upgrade(cx))
4104 });
4105 if let Some(buffer) = buffer {
4106 break buffer;
4107 }
4108 opened_buffer_rx
4109 .next()
4110 .await
4111 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4112 };
4113 Ok(buffer)
4114 }
4115 proto::buffer::Variant::State(mut buffer) => {
4116 let mut buffer_worktree = None;
4117 let mut buffer_file = None;
4118 if let Some(file) = buffer.file.take() {
4119 this.read_with(&cx, |this, cx| {
4120 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4121 let worktree =
4122 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4123 anyhow!("no worktree found for id {}", file.worktree_id)
4124 })?;
4125 buffer_file =
4126 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4127 as Box<dyn language::File>);
4128 buffer_worktree = Some(worktree);
4129 Ok::<_, anyhow::Error>(())
4130 })?;
4131 }
4132
4133 let buffer = cx.add_model(|cx| {
4134 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4135 });
4136
4137 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4138
4139 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4140 Ok(buffer)
4141 }
4142 }
4143 })
4144 }
4145
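/// Converts a proto symbol into a `Symbol`, resolving its language-specific
/// label when the language is known to the registry.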
4146 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4147 let language = self
4148 .languages
4149 .get_language(&serialized_symbol.language_name);
4150 let start = serialized_symbol
4151 .start
4152 .ok_or_else(|| anyhow!("invalid start"))?;
4153 let end = serialized_symbol
4154 .end
4155 .ok_or_else(|| anyhow!("invalid end"))?;
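// Note: this transmute assumes the wire value is a valid discriminant of the
// symbol kind enum; an out-of-range value here would be undefined behavior.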
4156 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4157 Ok(Symbol {
4158 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4159 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4160 language_name: serialized_symbol.language_name.clone(),
4161 label: language
4162 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4163 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4164 name: serialized_symbol.name,
4165 path: PathBuf::from(serialized_symbol.path),
4166 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4167 kind,
4168 signature: serialized_symbol
4169 .signature
4170 .try_into()
4171 .map_err(|_| anyhow!("invalid signature"))?,
4172 })
4173 }
4174
4175 async fn handle_buffer_saved(
4176 this: ModelHandle<Self>,
4177 envelope: TypedEnvelope<proto::BufferSaved>,
4178 _: Arc<Client>,
4179 mut cx: AsyncAppContext,
4180 ) -> Result<()> {
4181 let version = deserialize_version(envelope.payload.version);
4182 let mtime = envelope
4183 .payload
4184 .mtime
4185 .ok_or_else(|| anyhow!("missing mtime"))?
4186 .into();
4187
4188 this.update(&mut cx, |this, cx| {
4189 let buffer = this
4190 .opened_buffers
4191 .get(&envelope.payload.buffer_id)
4192 .and_then(|buffer| buffer.upgrade(cx));
4193 if let Some(buffer) = buffer {
4194 buffer.update(cx, |buffer, cx| {
4195 buffer.did_save(version, mtime, None, cx);
4196 });
4197 }
4198 Ok(())
4199 })
4200 }
4201
4202 async fn handle_buffer_reloaded(
4203 this: ModelHandle<Self>,
4204 envelope: TypedEnvelope<proto::BufferReloaded>,
4205 _: Arc<Client>,
4206 mut cx: AsyncAppContext,
4207 ) -> Result<()> {
4208 let payload = envelope.payload.clone();
4209 let version = deserialize_version(payload.version);
4210 let mtime = payload
4211 .mtime
4212 .ok_or_else(|| anyhow!("missing mtime"))?
4213 .into();
4214 this.update(&mut cx, |this, cx| {
4215 let buffer = this
4216 .opened_buffers
4217 .get(&payload.buffer_id)
4218 .and_then(|buffer| buffer.upgrade(cx));
4219 if let Some(buffer) = buffer {
4220 buffer.update(cx, |buffer, cx| {
4221 buffer.did_reload(version, mtime, cx);
4222 });
4223 }
4224 Ok(())
4225 })
4226 }
4227
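/// Fuzzy-matches `query` against the paths of all visible worktrees, returning
/// up to `max_results` matches. Matching runs on the background executor and
/// can be cancelled via `cancel_flag`.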
4228 pub fn match_paths<'a>(
4229 &self,
4230 query: &'a str,
4231 include_ignored: bool,
4232 smart_case: bool,
4233 max_results: usize,
4234 cancel_flag: &'a AtomicBool,
4235 cx: &AppContext,
4236 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4237 let worktrees = self
4238 .worktrees(cx)
4239 .filter(|worktree| worktree.read(cx).is_visible())
4240 .collect::<Vec<_>>();
4241 let include_root_name = worktrees.len() > 1;
4242 let candidate_sets = worktrees
4243 .into_iter()
4244 .map(|worktree| CandidateSet {
4245 snapshot: worktree.read(cx).snapshot(),
4246 include_ignored,
4247 include_root_name,
4248 })
4249 .collect::<Vec<_>>();
4250
4251 let background = cx.background().clone();
4252 async move {
4253 fuzzy::match_paths(
4254 candidate_sets.as_slice(),
4255 query,
4256 smart_case,
4257 max_results,
4258 cancel_flag,
4259 background,
4260 )
4261 .await
4262 }
4263 }
4264
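/// Converts LSP text edits into anchored buffer edits, resolving them against
/// the buffer snapshot that corresponds to the given LSP document version.
/// Adjacent edits are merged, and multi-line edits are diffed against the old
/// text so that anchors in unchanged regions are preserved.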
4265 fn edits_from_lsp(
4266 &mut self,
4267 buffer: &ModelHandle<Buffer>,
4268 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4269 version: Option<i32>,
4270 cx: &mut ModelContext<Self>,
4271 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4272 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4273 cx.background().spawn(async move {
4274 let snapshot = snapshot?;
4275 let mut lsp_edits = lsp_edits
4276 .into_iter()
4277 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4278 .peekable();
4279
4280 let mut edits = Vec::new();
4281 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4282 // Combine any LSP edits that are adjacent.
4283 //
4284 // Also, combine LSP edits that are separated from each other by only
4285 // a newline. This is important because for some code actions,
4286 // Rust-analyzer rewrites the entire buffer via a series of edits that
4287 // are separated by unchanged newline characters.
4288 //
4289 // In order for the diffing logic below to work properly, any edits that
4290 // cancel each other out must be combined into one.
4291 while let Some((next_range, next_text)) = lsp_edits.peek() {
4292 if next_range.start > range.end {
4293 if next_range.start.row > range.end.row + 1
4294 || next_range.start.column > 0
4295 || snapshot.clip_point_utf16(
4296 PointUtf16::new(range.end.row, u32::MAX),
4297 Bias::Left,
4298 ) > range.end
4299 {
4300 break;
4301 }
4302 new_text.push('\n');
4303 }
4304 range.end = next_range.end;
4305 new_text.push_str(&next_text);
4306 lsp_edits.next();
4307 }
4308
4309 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4310 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4311 {
4312 return Err(anyhow!("invalid edits received from language server"));
4313 }
4314
4315 // For multiline edits, perform a diff of the old and new text so that
4316 // we can identify the changes more precisely, preserving the locations
4317 // of any anchors positioned in the unchanged regions.
4318 if range.end.row > range.start.row {
4319 let mut offset = range.start.to_offset(&snapshot);
4320 let old_text = snapshot.text_for_range(range).collect::<String>();
4321
4322 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4323 let mut moved_since_edit = true;
4324 for change in diff.iter_all_changes() {
4325 let tag = change.tag();
4326 let value = change.value();
4327 match tag {
4328 ChangeTag::Equal => {
4329 offset += value.len();
4330 moved_since_edit = true;
4331 }
4332 ChangeTag::Delete => {
4333 let start = snapshot.anchor_after(offset);
4334 let end = snapshot.anchor_before(offset + value.len());
4335 if moved_since_edit {
4336 edits.push((start..end, String::new()));
4337 } else {
4338 edits.last_mut().unwrap().0.end = end;
4339 }
4340 offset += value.len();
4341 moved_since_edit = false;
4342 }
4343 ChangeTag::Insert => {
4344 if moved_since_edit {
4345 let anchor = snapshot.anchor_after(offset);
4346 edits.push((anchor.clone()..anchor, value.to_string()));
4347 } else {
4348 edits.last_mut().unwrap().1.push_str(value);
4349 }
4350 moved_since_edit = false;
4351 }
4352 }
4353 }
4354 } else if range.end == range.start {
4355 let anchor = snapshot.anchor_after(range.start);
4356 edits.push((anchor.clone()..anchor, new_text));
4357 } else {
4358 let edit_start = snapshot.anchor_after(range.start);
4359 let edit_end = snapshot.anchor_before(range.end);
4360 edits.push((edit_start..edit_end, new_text));
4361 }
4362 }
4363
4364 Ok(edits)
4365 })
4366 }
4367
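/// Returns the buffer snapshot corresponding to the given LSP document version,
/// discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
/// it. With no version, the buffer's current text snapshot is returned.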
4368 fn buffer_snapshot_for_lsp_version(
4369 &mut self,
4370 buffer: &ModelHandle<Buffer>,
4371 version: Option<i32>,
4372 cx: &AppContext,
4373 ) -> Result<TextBufferSnapshot> {
4374 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4375
4376 if let Some(version) = version {
4377 let buffer_id = buffer.read(cx).remote_id();
4378 let snapshots = self
4379 .buffer_snapshots
4380 .get_mut(&buffer_id)
4381 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4382 let mut found_snapshot = None;
4383 snapshots.retain(|(snapshot_version, snapshot)| {
4384 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4385 false
4386 } else {
4387 if *snapshot_version == version {
4388 found_snapshot = Some(snapshot.clone());
4389 }
4390 true
4391 }
4392 });
4393
4394 found_snapshot.ok_or_else(|| {
4395 anyhow!(
4396 "snapshot not found for buffer {} at version {}",
4397 buffer_id,
4398 version
4399 )
4400 })
4401 } else {
4402 Ok(buffer.read(cx).text_snapshot())
4403 }
4404 }
4405
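/// Returns the language server registered for the buffer's worktree and
/// language, if one has been started.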
4406 fn language_server_for_buffer(
4407 &self,
4408 buffer: &Buffer,
4409 cx: &AppContext,
4410 ) -> Option<&Arc<LanguageServer>> {
4411 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4412 let worktree_id = file.worktree_id(cx);
4413 self.language_servers.get(&(worktree_id, language.name()))
4414 } else {
4415 None
4416 }
4417 }
4418}
4419
4420impl WorktreeHandle {
4421 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4422 match self {
4423 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4424 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4425 }
4426 }
4427}
4428
4429impl OpenBuffer {
4430 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4431 match self {
4432 OpenBuffer::Strong(handle) => Some(handle.clone()),
4433 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4434 OpenBuffer::Loading(_) => None,
4435 }
4436 }
4437}
4438
4439struct CandidateSet {
4440 snapshot: Snapshot,
4441 include_ignored: bool,
4442 include_root_name: bool,
4443}
4444
4445impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4446 type Candidates = CandidateSetIter<'a>;
4447
4448 fn id(&self) -> usize {
4449 self.snapshot.id().to_usize()
4450 }
4451
4452 fn len(&self) -> usize {
4453 if self.include_ignored {
4454 self.snapshot.file_count()
4455 } else {
4456 self.snapshot.visible_file_count()
4457 }
4458 }
4459
4460 fn prefix(&self) -> Arc<str> {
4461 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4462 self.snapshot.root_name().into()
4463 } else if self.include_root_name {
4464 format!("{}/", self.snapshot.root_name()).into()
4465 } else {
4466 "".into()
4467 }
4468 }
4469
4470 fn candidates(&'a self, start: usize) -> Self::Candidates {
4471 CandidateSetIter {
4472 traversal: self.snapshot.files(self.include_ignored, start),
4473 }
4474 }
4475}
4476
4477struct CandidateSetIter<'a> {
4478 traversal: Traversal<'a>,
4479}
4480
4481impl<'a> Iterator for CandidateSetIter<'a> {
4482 type Item = PathMatchCandidate<'a>;
4483
4484 fn next(&mut self) -> Option<Self::Item> {
4485 self.traversal.next().map(|entry| {
4486 if let EntryKind::File(char_bag) = entry.kind {
4487 PathMatchCandidate {
4488 path: &entry.path,
4489 char_bag,
4490 }
4491 } else {
4492 unreachable!()
4493 }
4494 })
4495 }
4496}
4497
4498impl Entity for Project {
4499 type Event = Event;
4500
4501 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4502 match &self.client_state {
4503 ProjectClientState::Local { remote_id_rx, .. } => {
4504 if let Some(project_id) = *remote_id_rx.borrow() {
4505 self.client
4506 .send(proto::UnregisterProject { project_id })
4507 .log_err();
4508 }
4509 }
4510 ProjectClientState::Remote { remote_id, .. } => {
4511 self.client
4512 .send(proto::LeaveProject {
4513 project_id: *remote_id,
4514 })
4515 .log_err();
4516 }
4517 }
4518 }
4519
4520 fn app_will_quit(
4521 &mut self,
4522 _: &mut MutableAppContext,
4523 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4524 let shutdown_futures = self
4525 .language_servers
4526 .drain()
4527 .filter_map(|(_, server)| server.shutdown())
4528 .collect::<Vec<_>>();
4529 Some(
4530 async move {
4531 futures::future::join_all(shutdown_futures).await;
4532 }
4533 .boxed(),
4534 )
4535 }
4536}
4537
4538impl Collaborator {
4539 fn from_proto(
4540 message: proto::Collaborator,
4541 user_store: &ModelHandle<UserStore>,
4542 cx: &mut AsyncAppContext,
4543 ) -> impl Future<Output = Result<Self>> {
4544 let user = user_store.update(cx, |user_store, cx| {
4545 user_store.fetch_user(message.user_id, cx)
4546 });
4547
4548 async move {
4549 Ok(Self {
4550 peer_id: PeerId(message.peer_id),
4551 user: user.await?,
4552 replica_id: message.replica_id as ReplicaId,
4553 })
4554 }
4555 }
4556}
4557
4558impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4559 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4560 Self {
4561 worktree_id,
4562 path: path.as_ref().into(),
4563 }
4564 }
4565}
4566
4567impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4568 fn from(options: lsp::CreateFileOptions) -> Self {
4569 Self {
4570 overwrite: options.overwrite.unwrap_or(false),
4571 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4572 }
4573 }
4574}
4575
4576impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4577 fn from(options: lsp::RenameFileOptions) -> Self {
4578 Self {
4579 overwrite: options.overwrite.unwrap_or(false),
4580 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4581 }
4582 }
4583}
4584
4585impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4586 fn from(options: lsp::DeleteFileOptions) -> Self {
4587 Self {
4588 recursive: options.recursive.unwrap_or(false),
4589 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4590 }
4591 }
4592}
4593
4594fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4595 proto::Symbol {
4596 source_worktree_id: symbol.source_worktree_id.to_proto(),
4597 worktree_id: symbol.worktree_id.to_proto(),
4598 language_name: symbol.language_name.clone(),
4599 name: symbol.name.clone(),
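// Note: as in `deserialize_symbol`, this transmute assumes the proto and LSP
// symbol kinds share the same representation.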
4600 kind: unsafe { mem::transmute(symbol.kind) },
4601 path: symbol.path.to_string_lossy().to_string(),
4602 start: Some(proto::Point {
4603 row: symbol.range.start.row,
4604 column: symbol.range.start.column,
4605 }),
4606 end: Some(proto::Point {
4607 row: symbol.range.end.row,
4608 column: symbol.range.end.column,
4609 }),
4610 signature: symbol.signature.to_vec(),
4611 }
4612}
4613
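/// Computes `path` relative to `base`, inserting `..` components where needed.
/// Unlike `Path::strip_prefix`, this also handles paths that are not
/// descendants of `base`.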
4614fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4615 let mut path_components = path.components();
4616 let mut base_components = base.components();
4617 let mut components: Vec<Component> = Vec::new();
4618 loop {
4619 match (path_components.next(), base_components.next()) {
4620 (None, None) => break,
4621 (Some(a), None) => {
4622 components.push(a);
4623 components.extend(path_components.by_ref());
4624 break;
4625 }
4626 (None, _) => components.push(Component::ParentDir),
4627 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4628 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4629 (Some(a), Some(_)) => {
4630 components.push(Component::ParentDir);
4631 for _ in base_components {
4632 components.push(Component::ParentDir);
4633 }
4634 components.push(a);
4635 components.extend(path_components.by_ref());
4636 break;
4637 }
4638 }
4639 }
4640 components.iter().map(|c| c.as_os_str()).collect()
4641}
4642
4643impl Item for Buffer {
4644 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4645 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4646 }
4647}
4648
4649#[cfg(test)]
4650mod tests {
4651 use super::{Event, *};
4652 use fs::RealFs;
4653 use futures::{future, StreamExt};
4654 use gpui::test::subscribe;
4655 use language::{
4656 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4657 ToPoint,
4658 };
4659 use lsp::Url;
4660 use serde_json::json;
4661 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4662 use unindent::Unindent as _;
4663 use util::{assert_set_eq, test::temp_tree};
4664 use worktree::WorktreeHandle as _;
4665
4666 #[gpui::test]
4667 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4668 let dir = temp_tree(json!({
4669 "root": {
4670 "apple": "",
4671 "banana": {
4672 "carrot": {
4673 "date": "",
4674 "endive": "",
4675 }
4676 },
4677 "fennel": {
4678 "grape": "",
4679 }
4680 }
4681 }));
4682
4683 let root_link_path = dir.path().join("root_link");
4684 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4685 unix::fs::symlink(
4686 &dir.path().join("root/fennel"),
4687 &dir.path().join("root/finnochio"),
4688 )
4689 .unwrap();
4690
4691 let project = Project::test(Arc::new(RealFs), cx);
4692
4693 let (tree, _) = project
4694 .update(cx, |project, cx| {
4695 project.find_or_create_local_worktree(&root_link_path, true, cx)
4696 })
4697 .await
4698 .unwrap();
4699
4700 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4701 .await;
4702 cx.read(|cx| {
4703 let tree = tree.read(cx);
4704 assert_eq!(tree.file_count(), 5);
4705 assert_eq!(
4706 tree.inode_for_path("fennel/grape"),
4707 tree.inode_for_path("finnochio/grape")
4708 );
4709 });
4710
4711 let cancel_flag = Default::default();
4712 let results = project
4713 .read_with(cx, |project, cx| {
4714 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4715 })
4716 .await;
4717 assert_eq!(
4718 results
4719 .into_iter()
4720 .map(|result| result.path)
4721 .collect::<Vec<Arc<Path>>>(),
4722 vec![
4723 PathBuf::from("banana/carrot/date").into(),
4724 PathBuf::from("banana/carrot/endive").into(),
4725 ]
4726 );
4727 }
4728
4729 #[gpui::test]
4730 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4731 cx.foreground().forbid_parking();
4732
4733 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4734 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4735 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4736 completion_provider: Some(lsp::CompletionOptions {
4737 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4738 ..Default::default()
4739 }),
4740 ..Default::default()
4741 });
4742 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4743 completion_provider: Some(lsp::CompletionOptions {
4744 trigger_characters: Some(vec![":".to_string()]),
4745 ..Default::default()
4746 }),
4747 ..Default::default()
4748 });
4749
4750 let rust_language = Arc::new(Language::new(
4751 LanguageConfig {
4752 name: "Rust".into(),
4753 path_suffixes: vec!["rs".to_string()],
4754 language_server: Some(rust_lsp_config),
4755 ..Default::default()
4756 },
4757 Some(tree_sitter_rust::language()),
4758 ));
4759 let json_language = Arc::new(Language::new(
4760 LanguageConfig {
4761 name: "JSON".into(),
4762 path_suffixes: vec!["json".to_string()],
4763 language_server: Some(json_lsp_config),
4764 ..Default::default()
4765 },
4766 None,
4767 ));
4768
4769 let fs = FakeFs::new(cx.background());
4770 fs.insert_tree(
4771 "/the-root",
4772 json!({
4773 "test.rs": "const A: i32 = 1;",
4774 "test2.rs": "",
4775 "Cargo.toml": "a = 1",
4776 "package.json": "{\"a\": 1}",
4777 }),
4778 )
4779 .await;
4780
4781 let project = Project::test(fs, cx);
4782 project.update(cx, |project, _| {
4783 project.languages.add(rust_language);
4784 project.languages.add(json_language);
4785 });
4786
4787 let worktree_id = project
4788 .update(cx, |project, cx| {
4789 project.find_or_create_local_worktree("/the-root", true, cx)
4790 })
4791 .await
4792 .unwrap()
4793 .0
4794 .read_with(cx, |tree, _| tree.id());
4795
4796 // Open a buffer without an associated language server.
4797 let toml_buffer = project
4798 .update(cx, |project, cx| {
4799 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4800 })
4801 .await
4802 .unwrap();
4803
4804 // Open a buffer with an associated language server.
4805 let rust_buffer = project
4806 .update(cx, |project, cx| {
4807 project.open_buffer((worktree_id, "test.rs"), cx)
4808 })
4809 .await
4810 .unwrap();
4811
4812 // A server is started up, and it is notified about Rust files.
4813 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4814 assert_eq!(
4815 fake_rust_server
4816 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4817 .await
4818 .text_document,
4819 lsp::TextDocumentItem {
4820 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4821 version: 0,
4822 text: "const A: i32 = 1;".to_string(),
4823 language_id: Default::default()
4824 }
4825 );
4826
4827 // The buffer is configured based on the language server's capabilities.
4828 rust_buffer.read_with(cx, |buffer, _| {
4829 assert_eq!(
4830 buffer.completion_triggers(),
4831 &[".".to_string(), "::".to_string()]
4832 );
4833 });
4834 toml_buffer.read_with(cx, |buffer, _| {
4835 assert!(buffer.completion_triggers().is_empty());
4836 });
4837
4838 // Edit a buffer. The changes are reported to the language server.
4839 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4840 assert_eq!(
4841 fake_rust_server
4842 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4843 .await
4844 .text_document,
4845 lsp::VersionedTextDocumentIdentifier::new(
4846 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4847 1
4848 )
4849 );
4850
4851 // Open a third buffer with a different associated language server.
4852 let json_buffer = project
4853 .update(cx, |project, cx| {
4854 project.open_buffer((worktree_id, "package.json"), cx)
4855 })
4856 .await
4857 .unwrap();
4858
4859 // A JSON language server is started and notified only about the JSON buffer.
4860 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4861 assert_eq!(
4862 fake_json_server
4863 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4864 .await
4865 .text_document,
4866 lsp::TextDocumentItem {
4867 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4868 version: 0,
4869 text: "{\"a\": 1}".to_string(),
4870 language_id: Default::default()
4871 }
4872 );
4873
4874 // This buffer is configured based on the second language server's
4875 // capabilities.
4876 json_buffer.read_with(cx, |buffer, _| {
4877 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4878 });
4879
4880 // When opening another buffer whose language server is already running,
4881 // it is also configured based on the existing language server's capabilities.
4882 let rust_buffer2 = project
4883 .update(cx, |project, cx| {
4884 project.open_buffer((worktree_id, "test2.rs"), cx)
4885 })
4886 .await
4887 .unwrap();
4888 rust_buffer2.read_with(cx, |buffer, _| {
4889 assert_eq!(
4890 buffer.completion_triggers(),
4891 &[".".to_string(), "::".to_string()]
4892 );
4893 });
4894
4895 // Changes are reported only to servers matching the buffer's language.
4896 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4897 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4898 assert_eq!(
4899 fake_rust_server
4900 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4901 .await
4902 .text_document,
4903 lsp::VersionedTextDocumentIdentifier::new(
4904 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4905 1
4906 )
4907 );
4908
4909 // Save notifications are reported to all servers.
4910 toml_buffer
4911 .update(cx, |buffer, cx| buffer.save(cx))
4912 .await
4913 .unwrap();
4914 assert_eq!(
4915 fake_rust_server
4916 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4917 .await
4918 .text_document,
4919 lsp::TextDocumentIdentifier::new(
4920 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4921 )
4922 );
4923 assert_eq!(
4924 fake_json_server
4925 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4926 .await
4927 .text_document,
4928 lsp::TextDocumentIdentifier::new(
4929 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4930 )
4931 );
4932
4933 // Restart language servers
4934 project.update(cx, |project, cx| {
4935 project.restart_language_servers_for_buffers(
4936 vec![rust_buffer.clone(), json_buffer.clone()],
4937 cx,
4938 );
4939 });
4940
4941 let mut rust_shutdown_requests = fake_rust_server
4942 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
4943 let mut json_shutdown_requests = fake_json_server
4944 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
4945 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
4946
4947 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4949
4950 // Ensure both Rust documents are reopened in the new Rust language server, regardless of order.
4951 assert_set_eq!(
4952 [
4953 fake_rust_server
4954 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4955 .await
4956 .text_document,
4957 fake_rust_server
4958 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4959 .await
4960 .text_document,
4961 ],
4962 [
4963 lsp::TextDocumentItem {
4964 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4965 version: 1,
4966 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
4967 language_id: Default::default()
4968 },
4969 lsp::TextDocumentItem {
4970 uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4971 version: 1,
4972 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
4973 language_id: Default::default()
4974 },
4975 ]
4976 );
4977
4978 // Ensure the JSON document is reopened in the new JSON language server.
4979 assert_eq!(
4980 fake_json_server
4981 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4982 .await
4983 .text_document,
4984 lsp::TextDocumentItem {
4985 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4986 version: 0,
4987 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
4988 language_id: Default::default()
4989 }
4990 );
4991
4992 // Close notifications are reported only to servers matching the buffer's language.
4993 cx.update(|_| drop(json_buffer));
4994 let close_message = lsp::DidCloseTextDocumentParams {
4995 text_document: lsp::TextDocumentIdentifier::new(
4996 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4997 ),
4998 };
4999 assert_eq!(
5000 fake_json_server
5001 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5002 .await,
5003 close_message,
5004 );
5005 }
5006
5007 #[gpui::test]
5008 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5009 cx.foreground().forbid_parking();
5010
5011 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5012 let progress_token = language_server_config
5013 .disk_based_diagnostics_progress_token
5014 .clone()
5015 .unwrap();
5016
5017 let language = Arc::new(Language::new(
5018 LanguageConfig {
5019 name: "Rust".into(),
5020 path_suffixes: vec!["rs".to_string()],
5021 language_server: Some(language_server_config),
5022 ..Default::default()
5023 },
5024 Some(tree_sitter_rust::language()),
5025 ));
5026
5027 let fs = FakeFs::new(cx.background());
5028 fs.insert_tree(
5029 "/dir",
5030 json!({
5031 "a.rs": "fn a() { A }",
5032 "b.rs": "const y: i32 = 1",
5033 }),
5034 )
5035 .await;
5036
5037 let project = Project::test(fs, cx);
5038 project.update(cx, |project, _| project.languages.add(language));
5039
5040 let (tree, _) = project
5041 .update(cx, |project, cx| {
5042 project.find_or_create_local_worktree("/dir", true, cx)
5043 })
5044 .await
5045 .unwrap();
5046 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5047
5048 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5049 .await;
5050
5051 // Cause the worktree to start the fake language server.
5052 let _buffer = project
5053 .update(cx, |project, cx| {
5054 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5055 })
5056 .await
5057 .unwrap();
5058
5059 let mut events = subscribe(&project, cx);
5060
5061 let mut fake_server = fake_servers.next().await.unwrap();
5062 fake_server.start_progress(&progress_token).await;
5063 assert_eq!(
5064 events.next().await.unwrap(),
5065 Event::DiskBasedDiagnosticsStarted
5066 );
5067
5068 fake_server.start_progress(&progress_token).await;
5069 fake_server.end_progress(&progress_token).await;
5070 fake_server.start_progress(&progress_token).await;
5071
5072 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5073 lsp::PublishDiagnosticsParams {
5074 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5075 version: None,
5076 diagnostics: vec![lsp::Diagnostic {
5077 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5078 severity: Some(lsp::DiagnosticSeverity::ERROR),
5079 message: "undefined variable 'A'".to_string(),
5080 ..Default::default()
5081 }],
5082 },
5083 );
5084 assert_eq!(
5085 events.next().await.unwrap(),
5086 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5087 );
5088
5089 fake_server.end_progress(&progress_token).await;
5090 fake_server.end_progress(&progress_token).await;
5091 assert_eq!(
5092 events.next().await.unwrap(),
5093 Event::DiskBasedDiagnosticsUpdated
5094 );
5095 assert_eq!(
5096 events.next().await.unwrap(),
5097 Event::DiskBasedDiagnosticsFinished
5098 );
5099
5100 let buffer = project
5101 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5102 .await
5103 .unwrap();
5104
5105 buffer.read_with(cx, |buffer, _| {
5106 let snapshot = buffer.snapshot();
5107 let diagnostics = snapshot
5108 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5109 .collect::<Vec<_>>();
5110 assert_eq!(
5111 diagnostics,
5112 &[DiagnosticEntry {
5113 range: Point::new(0, 9)..Point::new(0, 10),
5114 diagnostic: Diagnostic {
5115 severity: lsp::DiagnosticSeverity::ERROR,
5116 message: "undefined variable 'A'".to_string(),
5117 group_id: 0,
5118 is_primary: true,
5119 ..Default::default()
5120 }
5121 }]
5122 )
5123 });
5124 }
5125
5126 #[gpui::test]
5127 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5128 cx.foreground().forbid_parking();
5129
5130 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5131 lsp_config
5132 .disk_based_diagnostic_sources
5133 .insert("disk".to_string());
5134 let language = Arc::new(Language::new(
5135 LanguageConfig {
5136 name: "Rust".into(),
5137 path_suffixes: vec!["rs".to_string()],
5138 language_server: Some(lsp_config),
5139 ..Default::default()
5140 },
5141 Some(tree_sitter_rust::language()),
5142 ));
5143
5144 let text = "
5145 fn a() { A }
5146 fn b() { BB }
5147 fn c() { CCC }
5148 "
5149 .unindent();
5150
5151 let fs = FakeFs::new(cx.background());
5152 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5153
5154 let project = Project::test(fs, cx);
5155 project.update(cx, |project, _| project.languages.add(language));
5156
5157 let worktree_id = project
5158 .update(cx, |project, cx| {
5159 project.find_or_create_local_worktree("/dir", true, cx)
5160 })
5161 .await
5162 .unwrap()
5163 .0
5164 .read_with(cx, |tree, _| tree.id());
5165
5166 let buffer = project
5167 .update(cx, |project, cx| {
5168 project.open_buffer((worktree_id, "a.rs"), cx)
5169 })
5170 .await
5171 .unwrap();
5172
5173 let mut fake_server = fake_servers.next().await.unwrap();
5174 let open_notification = fake_server
5175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5176 .await;
5177
5178 // Edit the buffer, moving the content down
5179 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5180 let change_notification_1 = fake_server
5181 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5182 .await;
5183 assert!(
5184 change_notification_1.text_document.version > open_notification.text_document.version
5185 );
5186
5187 // Report some diagnostics for the initial version of the buffer
5188 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5189 lsp::PublishDiagnosticsParams {
5190 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5191 version: Some(open_notification.text_document.version),
5192 diagnostics: vec![
5193 lsp::Diagnostic {
5194 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5195 severity: Some(DiagnosticSeverity::ERROR),
5196 message: "undefined variable 'A'".to_string(),
5197 source: Some("disk".to_string()),
5198 ..Default::default()
5199 },
5200 lsp::Diagnostic {
5201 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5202 severity: Some(DiagnosticSeverity::ERROR),
5203 message: "undefined variable 'BB'".to_string(),
5204 source: Some("disk".to_string()),
5205 ..Default::default()
5206 },
5207 lsp::Diagnostic {
5208 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5209 severity: Some(DiagnosticSeverity::ERROR),
5210 source: Some("disk".to_string()),
5211 message: "undefined variable 'CCC'".to_string(),
5212 ..Default::default()
5213 },
5214 ],
5215 },
5216 );
5217
5218 // The diagnostics have moved down since they were created.
5219 buffer.next_notification(cx).await;
5220 buffer.read_with(cx, |buffer, _| {
5221 assert_eq!(
5222 buffer
5223 .snapshot()
5224 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5225 .collect::<Vec<_>>(),
5226 &[
5227 DiagnosticEntry {
5228 range: Point::new(3, 9)..Point::new(3, 11),
5229 diagnostic: Diagnostic {
5230 severity: DiagnosticSeverity::ERROR,
5231 message: "undefined variable 'BB'".to_string(),
5232 is_disk_based: true,
5233 group_id: 1,
5234 is_primary: true,
5235 ..Default::default()
5236 },
5237 },
5238 DiagnosticEntry {
5239 range: Point::new(4, 9)..Point::new(4, 12),
5240 diagnostic: Diagnostic {
5241 severity: DiagnosticSeverity::ERROR,
5242 message: "undefined variable 'CCC'".to_string(),
5243 is_disk_based: true,
5244 group_id: 2,
5245 is_primary: true,
5246 ..Default::default()
5247 }
5248 }
5249 ]
5250 );
5251 assert_eq!(
5252 chunks_with_diagnostics(buffer, 0..buffer.len()),
5253 [
5254 ("\n\nfn a() { ".to_string(), None),
5255 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5256 (" }\nfn b() { ".to_string(), None),
5257 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5258 (" }\nfn c() { ".to_string(), None),
5259 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5260 (" }\n".to_string(), None),
5261 ]
5262 );
5263 assert_eq!(
5264 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5265 [
5266 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5267 (" }\nfn c() { ".to_string(), None),
5268 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5269 ]
5270 );
5271 });
5272
5273 // Ensure overlapping diagnostics are highlighted correctly.
5274 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5275 lsp::PublishDiagnosticsParams {
5276 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5277 version: Some(open_notification.text_document.version),
5278 diagnostics: vec![
5279 lsp::Diagnostic {
5280 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5281 severity: Some(DiagnosticSeverity::ERROR),
5282 message: "undefined variable 'A'".to_string(),
5283 source: Some("disk".to_string()),
5284 ..Default::default()
5285 },
5286 lsp::Diagnostic {
5287 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5288 severity: Some(DiagnosticSeverity::WARNING),
5289 message: "unreachable statement".to_string(),
5290 source: Some("disk".to_string()),
5291 ..Default::default()
5292 },
5293 ],
5294 },
5295 );
5296
5297 buffer.next_notification(cx).await;
5298 buffer.read_with(cx, |buffer, _| {
5299 assert_eq!(
5300 buffer
5301 .snapshot()
5302 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5303 .collect::<Vec<_>>(),
5304 &[
5305 DiagnosticEntry {
5306 range: Point::new(2, 9)..Point::new(2, 12),
5307 diagnostic: Diagnostic {
5308 severity: DiagnosticSeverity::WARNING,
5309 message: "unreachable statement".to_string(),
5310 is_disk_based: true,
5311 group_id: 1,
5312 is_primary: true,
5313 ..Default::default()
5314 }
5315 },
5316 DiagnosticEntry {
5317 range: Point::new(2, 9)..Point::new(2, 10),
5318 diagnostic: Diagnostic {
5319 severity: DiagnosticSeverity::ERROR,
5320 message: "undefined variable 'A'".to_string(),
5321 is_disk_based: true,
5322 group_id: 0,
5323 is_primary: true,
5324 ..Default::default()
5325 },
5326 }
5327 ]
5328 );
5329 assert_eq!(
5330 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5331 [
5332 ("fn a() { ".to_string(), None),
5333 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5334 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5335 ("\n".to_string(), None),
5336 ]
5337 );
5338 assert_eq!(
5339 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5340 [
5341 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5342 ("\n".to_string(), None),
5343 ]
5344 );
5345 });
5346
5347 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5348 // changes since the last save.
5349 buffer.update(cx, |buffer, cx| {
5350 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5351 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5352 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5353 });
5354 let change_notification_2 = fake_server
5355 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5356 .await;
5357 assert!(
5358 change_notification_2.text_document.version
5359 > change_notification_1.text_document.version
5360 );
5361
5362 // Handle out-of-order diagnostics
5363 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5364 lsp::PublishDiagnosticsParams {
5365 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5366 version: Some(change_notification_2.text_document.version),
5367 diagnostics: vec![
5368 lsp::Diagnostic {
5369 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5370 severity: Some(DiagnosticSeverity::ERROR),
5371 message: "undefined variable 'BB'".to_string(),
5372 source: Some("disk".to_string()),
5373 ..Default::default()
5374 },
5375 lsp::Diagnostic {
5376 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5377 severity: Some(DiagnosticSeverity::WARNING),
5378 message: "undefined variable 'A'".to_string(),
5379 source: Some("disk".to_string()),
5380 ..Default::default()
5381 },
5382 ],
5383 },
5384 );
5385
5386 buffer.next_notification(cx).await;
5387 buffer.read_with(cx, |buffer, _| {
5388 assert_eq!(
5389 buffer
5390 .snapshot()
5391 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5392 .collect::<Vec<_>>(),
5393 &[
5394 DiagnosticEntry {
5395 range: Point::new(2, 21)..Point::new(2, 22),
5396 diagnostic: Diagnostic {
5397 severity: DiagnosticSeverity::WARNING,
5398 message: "undefined variable 'A'".to_string(),
5399 is_disk_based: true,
5400 group_id: 1,
5401 is_primary: true,
5402 ..Default::default()
5403 }
5404 },
5405 DiagnosticEntry {
5406 range: Point::new(3, 9)..Point::new(3, 14),
5407 diagnostic: Diagnostic {
5408 severity: DiagnosticSeverity::ERROR,
5409 message: "undefined variable 'BB'".to_string(),
5410 is_disk_based: true,
5411 group_id: 0,
5412 is_primary: true,
5413 ..Default::default()
5414 },
5415 }
5416 ]
5417 );
5418 });
5419 }
5420
5421 #[gpui::test]
5422 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5423 cx.foreground().forbid_parking();
5424
5425 let text = concat!(
5426 "let one = ;\n", //
5427 "let two = \n",
5428 "let three = 3;\n",
5429 );
5430
5431 let fs = FakeFs::new(cx.background());
5432 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5433
5434 let project = Project::test(fs, cx);
5435 let worktree_id = project
5436 .update(cx, |project, cx| {
5437 project.find_or_create_local_worktree("/dir", true, cx)
5438 })
5439 .await
5440 .unwrap()
5441 .0
5442 .read_with(cx, |tree, _| tree.id());
5443
5444 let buffer = project
5445 .update(cx, |project, cx| {
5446 project.open_buffer((worktree_id, "a.rs"), cx)
5447 })
5448 .await
5449 .unwrap();
5450
5451 project.update(cx, |project, cx| {
5452 project
5453 .update_buffer_diagnostics(
5454 &buffer,
5455 vec![
5456 DiagnosticEntry {
5457 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5458 diagnostic: Diagnostic {
5459 severity: DiagnosticSeverity::ERROR,
5460 message: "syntax error 1".to_string(),
5461 ..Default::default()
5462 },
5463 },
5464 DiagnosticEntry {
5465 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5466 diagnostic: Diagnostic {
5467 severity: DiagnosticSeverity::ERROR,
5468 message: "syntax error 2".to_string(),
5469 ..Default::default()
5470 },
5471 },
5472 ],
5473 None,
5474 cx,
5475 )
5476 .unwrap();
5477 });
5478
5479 // An empty range is extended forward to include the following character.
5480 // At the end of a line, an empty range is extended backward to include
5481 // the preceding character.
5482 buffer.read_with(cx, |buffer, _| {
5483 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5484 assert_eq!(
5485 chunks
5486 .iter()
5487 .map(|(s, d)| (s.as_str(), *d))
5488 .collect::<Vec<_>>(),
5489 &[
5490 ("let one = ", None),
5491 (";", Some(DiagnosticSeverity::ERROR)),
5492 ("\nlet two =", None),
5493 (" ", Some(DiagnosticSeverity::ERROR)),
5494 ("\nlet three = 3;\n", None)
5495 ]
5496 );
5497 });
5498 }
5499
5500 #[gpui::test]
5501 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5502 cx.foreground().forbid_parking();
5503
5504 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5505 let language = Arc::new(Language::new(
5506 LanguageConfig {
5507 name: "Rust".into(),
5508 path_suffixes: vec!["rs".to_string()],
5509 language_server: Some(lsp_config),
5510 ..Default::default()
5511 },
5512 Some(tree_sitter_rust::language()),
5513 ));
5514
5515 let text = "
5516 fn a() {
5517 f1();
5518 }
5519 fn b() {
5520 f2();
5521 }
5522 fn c() {
5523 f3();
5524 }
5525 "
5526 .unindent();
5527
5528 let fs = FakeFs::new(cx.background());
5529 fs.insert_tree(
5530 "/dir",
5531 json!({
5532 "a.rs": text.clone(),
5533 }),
5534 )
5535 .await;
5536
5537 let project = Project::test(fs, cx);
5538 project.update(cx, |project, _| project.languages.add(language));
5539
5540 let worktree_id = project
5541 .update(cx, |project, cx| {
5542 project.find_or_create_local_worktree("/dir", true, cx)
5543 })
5544 .await
5545 .unwrap()
5546 .0
5547 .read_with(cx, |tree, _| tree.id());
5548
5549 let buffer = project
5550 .update(cx, |project, cx| {
5551 project.open_buffer((worktree_id, "a.rs"), cx)
5552 })
5553 .await
5554 .unwrap();
5555
5556 let mut fake_server = fake_servers.next().await.unwrap();
5557 let lsp_document_version = fake_server
5558 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5559 .await
5560 .text_document
5561 .version;
5562
5563 // Simulate editing the buffer after the language server computes some edits.
5564 buffer.update(cx, |buffer, cx| {
5565 buffer.edit(
5566 [Point::new(0, 0)..Point::new(0, 0)],
5567 "// above first function\n",
5568 cx,
5569 );
5570 buffer.edit(
5571 [Point::new(2, 0)..Point::new(2, 0)],
5572 " // inside first function\n",
5573 cx,
5574 );
5575 buffer.edit(
5576 [Point::new(6, 4)..Point::new(6, 4)],
5577 "// inside second function ",
5578 cx,
5579 );
5580
5581 assert_eq!(
5582 buffer.text(),
5583 "
5584 // above first function
5585 fn a() {
5586 // inside first function
5587 f1();
5588 }
5589 fn b() {
5590 // inside second function f2();
5591 }
5592 fn c() {
5593 f3();
5594 }
5595 "
5596 .unindent()
5597 );
5598 });
5599
5600 let edits = project
5601 .update(cx, |project, cx| {
5602 project.edits_from_lsp(
5603 &buffer,
5604 vec![
5605 // replace body of first function
5606 lsp::TextEdit {
5607 range: lsp::Range::new(
5608 lsp::Position::new(0, 0),
5609 lsp::Position::new(3, 0),
5610 ),
5611 new_text: "
5612 fn a() {
5613 f10();
5614 }
5615 "
5616 .unindent(),
5617 },
5618 // edit inside second function
5619 lsp::TextEdit {
5620 range: lsp::Range::new(
5621 lsp::Position::new(4, 6),
5622 lsp::Position::new(4, 6),
5623 ),
5624 new_text: "00".into(),
5625 },
5626 // edit inside third function via two distinct edits
5627 lsp::TextEdit {
5628 range: lsp::Range::new(
5629 lsp::Position::new(7, 5),
5630 lsp::Position::new(7, 5),
5631 ),
5632 new_text: "4000".into(),
5633 },
5634 lsp::TextEdit {
5635 range: lsp::Range::new(
5636 lsp::Position::new(7, 5),
5637 lsp::Position::new(7, 6),
5638 ),
5639 new_text: "".into(),
5640 },
5641 ],
5642 Some(lsp_document_version),
5643 cx,
5644 )
5645 })
5646 .await
5647 .unwrap();
5648
5649 buffer.update(cx, |buffer, cx| {
5650 for (range, new_text) in edits {
5651 buffer.edit([range], new_text, cx);
5652 }
5653 assert_eq!(
5654 buffer.text(),
5655 "
5656 // above first function
5657 fn a() {
5658 // inside first function
5659 f10();
5660 }
5661 fn b() {
5662 // inside second function f200();
5663 }
5664 fn c() {
5665 f4000();
5666 }
5667 "
5668 .unindent()
5669 );
5670 });
5671 }
5672
5673 #[gpui::test]
5674 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5675 cx.foreground().forbid_parking();
5676
5677 let text = "
5678 use a::b;
5679 use a::c;
5680
5681 fn f() {
5682 b();
5683 c();
5684 }
5685 "
5686 .unindent();
5687
5688 let fs = FakeFs::new(cx.background());
5689 fs.insert_tree(
5690 "/dir",
5691 json!({
5692 "a.rs": text.clone(),
5693 }),
5694 )
5695 .await;
5696
5697 let project = Project::test(fs, cx);
5698 let worktree_id = project
5699 .update(cx, |project, cx| {
5700 project.find_or_create_local_worktree("/dir", true, cx)
5701 })
5702 .await
5703 .unwrap()
5704 .0
5705 .read_with(cx, |tree, _| tree.id());
5706
5707 let buffer = project
5708 .update(cx, |project, cx| {
5709 project.open_buffer((worktree_id, "a.rs"), cx)
5710 })
5711 .await
5712 .unwrap();
5713
5714 // Simulate the language server sending us a small edit in the form of a very large diff.
5715 // Rust-analyzer does this when performing a merge-imports code action.
5716 let edits = project
5717 .update(cx, |project, cx| {
5718 project.edits_from_lsp(
5719 &buffer,
5720 [
5721 // Replace the first use statement without editing the semicolon.
5722 lsp::TextEdit {
5723 range: lsp::Range::new(
5724 lsp::Position::new(0, 4),
5725 lsp::Position::new(0, 8),
5726 ),
5727 new_text: "a::{b, c}".into(),
5728 },
5729 // Reinsert the remainder of the file between the semicolon and the final
5730 // newline of the file.
5731 lsp::TextEdit {
5732 range: lsp::Range::new(
5733 lsp::Position::new(0, 9),
5734 lsp::Position::new(0, 9),
5735 ),
5736 new_text: "\n\n".into(),
5737 },
5738 lsp::TextEdit {
5739 range: lsp::Range::new(
5740 lsp::Position::new(0, 9),
5741 lsp::Position::new(0, 9),
5742 ),
5743 new_text: "
5744 fn f() {
5745 b();
5746 c();
5747 }"
5748 .unindent(),
5749 },
5750 // Delete everything after the first newline of the file.
5751 lsp::TextEdit {
5752 range: lsp::Range::new(
5753 lsp::Position::new(1, 0),
5754 lsp::Position::new(7, 0),
5755 ),
5756 new_text: "".into(),
5757 },
5758 ],
5759 None,
5760 cx,
5761 )
5762 })
5763 .await
5764 .unwrap();
5765
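// The large diff should collapse to a minimal pair of edits: one replacement inside the
// first use statement and one deletion of the now-redundant second `use` line.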
5766 buffer.update(cx, |buffer, cx| {
5767 let edits = edits
5768 .into_iter()
5769 .map(|(range, text)| {
5770 (
5771 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5772 text,
5773 )
5774 })
5775 .collect::<Vec<_>>();
5776
5777 assert_eq!(
5778 edits,
5779 [
5780 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5781 (Point::new(1, 0)..Point::new(2, 0), "".into())
5782 ]
5783 );
5784
5785 for (range, new_text) in edits {
5786 buffer.edit([range], new_text, cx);
5787 }
5788 assert_eq!(
5789 buffer.text(),
5790 "
5791 use a::{b, c};
5792
5793 fn f() {
5794 b();
5795 c();
5796 }
5797 "
5798 .unindent()
5799 );
5800 });
5801 }
5802
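// Collects the chunks of `buffer` within `range`, coalescing adjacent chunks that share
// the same diagnostic severity.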
5803 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5804 buffer: &Buffer,
5805 range: Range<T>,
5806 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5807 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5808 for chunk in buffer.snapshot().chunks(range, true) {
5809 if chunks.last().map_or(false, |prev_chunk| {
5810 prev_chunk.1 == chunk.diagnostic_severity
5811 }) {
5812 chunks.last_mut().unwrap().0.push_str(chunk.text);
5813 } else {
5814 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5815 }
5816 }
5817 chunks
5818 }
5819
5820 #[gpui::test]
5821 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5822 let dir = temp_tree(json!({
5823 "root": {
5824 "dir1": {},
5825 "dir2": {
5826 "dir3": {}
5827 }
5828 }
5829 }));
5830
5831 let project = Project::test(Arc::new(RealFs), cx);
5832 let (tree, _) = project
5833 .update(cx, |project, cx| {
5834 project.find_or_create_local_worktree(&dir.path(), true, cx)
5835 })
5836 .await
5837 .unwrap();
5838
5839 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5840 .await;
5841
5842 let cancel_flag = Default::default();
5843 let results = project
5844 .read_with(cx, |project, cx| {
5845 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5846 })
5847 .await;
5848
5849 assert!(results.is_empty());
5850 }
5851
5852 #[gpui::test]
5853 async fn test_definition(cx: &mut gpui::TestAppContext) {
5854 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5855 let language = Arc::new(Language::new(
5856 LanguageConfig {
5857 name: "Rust".into(),
5858 path_suffixes: vec!["rs".to_string()],
5859 language_server: Some(language_server_config),
5860 ..Default::default()
5861 },
5862 Some(tree_sitter_rust::language()),
5863 ));
5864
5865 let fs = FakeFs::new(cx.background());
5866 fs.insert_tree(
5867 "/dir",
5868 json!({
5869 "a.rs": "const fn a() { A }",
5870 "b.rs": "const y: i32 = crate::a()",
5871 }),
5872 )
5873 .await;
5874
5875 let project = Project::test(fs, cx);
5876 project.update(cx, |project, _| {
5877 Arc::get_mut(&mut project.languages).unwrap().add(language);
5878 });
5879
5880 let (tree, _) = project
5881 .update(cx, |project, cx| {
5882 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5883 })
5884 .await
5885 .unwrap();
5886 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5887 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5888 .await;
5889
5890 let buffer = project
5891 .update(cx, |project, cx| {
5892 project.open_buffer(
5893 ProjectPath {
5894 worktree_id,
5895 path: Path::new("").into(),
5896 },
5897 cx,
5898 )
5899 })
5900 .await
5901 .unwrap();
5902
5903 let mut fake_server = fake_servers.next().await.unwrap();
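// Answer the go-to-definition request with a location in `a.rs`, a file that is not yet
// part of the project.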
5904 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
5905 let params = params.text_document_position_params;
5906 assert_eq!(
5907 params.text_document.uri.to_file_path().unwrap(),
5908 Path::new("/dir/b.rs"),
5909 );
5910 assert_eq!(params.position, lsp::Position::new(0, 22));
5911
5912 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5913 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5914 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5915 )))
5916 });
5917
5918 let mut definitions = project
5919 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5920 .await
5921 .unwrap();
5922
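// Resolving the definition should have opened `a.rs` in a new, non-visible worktree.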
5923 assert_eq!(definitions.len(), 1);
5924 let definition = definitions.pop().unwrap();
5925 cx.update(|cx| {
5926 let target_buffer = definition.buffer.read(cx);
5927 assert_eq!(
5928 target_buffer
5929 .file()
5930 .unwrap()
5931 .as_local()
5932 .unwrap()
5933 .abs_path(cx),
5934 Path::new("/dir/a.rs"),
5935 );
5936 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5937 assert_eq!(
5938 list_worktrees(&project, cx),
5939 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5940 );
5941
5942 drop(definition);
5943 });
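// After dropping the definition, the temporary worktree created for `a.rs` should be released.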
5944 cx.read(|cx| {
5945 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5946 });
5947
5948 fn list_worktrees<'a>(
5949 project: &'a ModelHandle<Project>,
5950 cx: &'a AppContext,
5951 ) -> Vec<(&'a Path, bool)> {
5952 project
5953 .read(cx)
5954 .worktrees(cx)
5955 .map(|worktree| {
5956 let worktree = worktree.read(cx);
5957 (
5958 worktree.as_local().unwrap().abs_path().as_ref(),
5959 worktree.is_visible(),
5960 )
5961 })
5962 .collect::<Vec<_>>()
5963 }
5964 }
5965
5966 #[gpui::test]
5967 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5968 let fs = FakeFs::new(cx.background());
5969 fs.insert_tree(
5970 "/dir",
5971 json!({
5972 "file1": "the old contents",
5973 }),
5974 )
5975 .await;
5976
5977 let project = Project::test(fs.clone(), cx);
5978 let worktree_id = project
5979 .update(cx, |p, cx| {
5980 p.find_or_create_local_worktree("/dir", true, cx)
5981 })
5982 .await
5983 .unwrap()
5984 .0
5985 .read_with(cx, |tree, _| tree.id());
5986
5987 let buffer = project
5988 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5989 .await
5990 .unwrap();
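// Insert a large amount of text (roughly 160 KB) and save the buffer back to disk.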
5991 buffer
5992 .update(cx, |buffer, cx| {
5993 assert_eq!(buffer.text(), "the old contents");
5994 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5995 buffer.save(cx)
5996 })
5997 .await
5998 .unwrap();
5999
6000 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6001 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6002 }
6003
6004 #[gpui::test]
6005 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6006 let fs = FakeFs::new(cx.background());
6007 fs.insert_tree(
6008 "/dir",
6009 json!({
6010 "file1": "the old contents",
6011 }),
6012 )
6013 .await;
6014
6015 let project = Project::test(fs.clone(), cx);
6016 let worktree_id = project
6017 .update(cx, |p, cx| {
6018 p.find_or_create_local_worktree("/dir/file1", true, cx)
6019 })
6020 .await
6021 .unwrap()
6022 .0
6023 .read_with(cx, |tree, _| tree.id());
6024
6025 let buffer = project
6026 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6027 .await
6028 .unwrap();
6029 buffer
6030 .update(cx, |buffer, cx| {
6031 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6032 buffer.save(cx)
6033 })
6034 .await
6035 .unwrap();
6036
6037 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6038 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6039 }
6040
6041 #[gpui::test]
6042 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6043 let fs = FakeFs::new(cx.background());
6044 fs.insert_tree("/dir", json!({})).await;
6045
6046 let project = Project::test(fs.clone(), cx);
6047 let (worktree, _) = project
6048 .update(cx, |project, cx| {
6049 project.find_or_create_local_worktree("/dir", true, cx)
6050 })
6051 .await
6052 .unwrap();
6053 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6054
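// Create an untitled buffer, edit it, and then save it under a new path in the worktree.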
6055 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6056 buffer.update(cx, |buffer, cx| {
6057 buffer.edit([0..0], "abc", cx);
6058 assert!(buffer.is_dirty());
6059 assert!(!buffer.has_conflict());
6060 });
6061 project
6062 .update(cx, |project, cx| {
6063 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6064 })
6065 .await
6066 .unwrap();
6067 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6068 buffer.read_with(cx, |buffer, cx| {
6069 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6070 assert!(!buffer.is_dirty());
6071 assert!(!buffer.has_conflict());
6072 });
6073
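// Opening the path the buffer was saved as should return the same buffer rather than
// creating a new one.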
6074 let opened_buffer = project
6075 .update(cx, |project, cx| {
6076 project.open_buffer((worktree_id, "file1"), cx)
6077 })
6078 .await
6079 .unwrap();
6080 assert_eq!(opened_buffer, buffer);
6081 }
6082
6083 #[gpui::test(retries = 5)]
6084 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6085 let dir = temp_tree(json!({
6086 "a": {
6087 "file1": "",
6088 "file2": "",
6089 "file3": "",
6090 },
6091 "b": {
6092 "c": {
6093 "file4": "",
6094 "file5": "",
6095 }
6096 }
6097 }));
6098
6099 let project = Project::test(Arc::new(RealFs), cx);
6100 let rpc = project.read_with(cx, |p, _| p.client.clone());
6101
6102 let (tree, _) = project
6103 .update(cx, |p, cx| {
6104 p.find_or_create_local_worktree(dir.path(), true, cx)
6105 })
6106 .await
6107 .unwrap();
6108 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6109
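// Helpers: open a buffer for a path, and look up the worktree entry id for a path.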
6110 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6111 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6112 async move { buffer.await.unwrap() }
6113 };
6114 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6115 tree.read_with(cx, |tree, _| {
6116 tree.entry_for_path(path)
6117 .unwrap_or_else(|| panic!("no entry for path {}", path))
6118 .id
6119 })
6120 };
6121
6122 let buffer2 = buffer_for_path("a/file2", cx).await;
6123 let buffer3 = buffer_for_path("a/file3", cx).await;
6124 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6125 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6126
6127 let file2_id = id_for_path("a/file2", &cx);
6128 let file3_id = id_for_path("a/file3", &cx);
6129 let file4_id = id_for_path("b/c/file4", &cx);
6130
6131 // Wait for the initial scan.
6132 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6133 .await;
6134
6135 // Create a remote copy of this worktree.
6136 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6137 let (remote, load_task) = cx.update(|cx| {
6138 Worktree::remote(
6139 1,
6140 1,
6141 initial_snapshot.to_proto(&Default::default(), true),
6142 rpc.clone(),
6143 cx,
6144 )
6145 });
6146 load_task.await;
6147
6148 cx.read(|cx| {
6149 assert!(!buffer2.read(cx).is_dirty());
6150 assert!(!buffer3.read(cx).is_dirty());
6151 assert!(!buffer4.read(cx).is_dirty());
6152 assert!(!buffer5.read(cx).is_dirty());
6153 });
6154
6155 // Rename and delete files and directories.
6156 tree.flush_fs_events(&cx).await;
6157 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6158 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6159 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6160 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6161 tree.flush_fs_events(&cx).await;
6162
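// After the renames and deletions above, the worktree should contain exactly these paths.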
6163 let expected_paths = vec![
6164 "a",
6165 "a/file1",
6166 "a/file2.new",
6167 "b",
6168 "d",
6169 "d/file3",
6170 "d/file4",
6171 ];
6172
6173 cx.read(|app| {
6174 assert_eq!(
6175 tree.read(app)
6176 .paths()
6177 .map(|p| p.to_str().unwrap())
6178 .collect::<Vec<_>>(),
6179 expected_paths
6180 );
6181
6182 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6183 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6184 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6185
6186 assert_eq!(
6187 buffer2.read(app).file().unwrap().path().as_ref(),
6188 Path::new("a/file2.new")
6189 );
6190 assert_eq!(
6191 buffer3.read(app).file().unwrap().path().as_ref(),
6192 Path::new("d/file3")
6193 );
6194 assert_eq!(
6195 buffer4.read(app).file().unwrap().path().as_ref(),
6196 Path::new("d/file4")
6197 );
6198 assert_eq!(
6199 buffer5.read(app).file().unwrap().path().as_ref(),
6200 Path::new("b/c/file5")
6201 );
6202
6203 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6204 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6205 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6206 assert!(buffer5.read(app).file().unwrap().is_deleted());
6207 });
6208
6209 // Update the remote worktree. Check that it becomes consistent with the
6210 // local worktree.
6211 remote.update(cx, |remote, cx| {
6212 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6213 &initial_snapshot,
6214 1,
6215 1,
6216 true,
6217 );
6218 remote
6219 .as_remote_mut()
6220 .unwrap()
6221 .snapshot
6222 .apply_remote_update(update_message)
6223 .unwrap();
6224
6225 assert_eq!(
6226 remote
6227 .paths()
6228 .map(|p| p.to_str().unwrap())
6229 .collect::<Vec<_>>(),
6230 expected_paths
6231 );
6232 });
6233 }
6234
6235 #[gpui::test]
6236 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6237 let fs = FakeFs::new(cx.background());
6238 fs.insert_tree(
6239 "/the-dir",
6240 json!({
6241 "a.txt": "a-contents",
6242 "b.txt": "b-contents",
6243 }),
6244 )
6245 .await;
6246
6247 let project = Project::test(fs.clone(), cx);
6248 let worktree_id = project
6249 .update(cx, |p, cx| {
6250 p.find_or_create_local_worktree("/the-dir", true, cx)
6251 })
6252 .await
6253 .unwrap()
6254 .0
6255 .read_with(cx, |tree, _| tree.id());
6256
6257 // Spawn multiple tasks to open paths, repeating some paths.
6258 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6259 (
6260 p.open_buffer((worktree_id, "a.txt"), cx),
6261 p.open_buffer((worktree_id, "b.txt"), cx),
6262 p.open_buffer((worktree_id, "a.txt"), cx),
6263 )
6264 });
6265
6266 let buffer_a_1 = buffer_a_1.await.unwrap();
6267 let buffer_a_2 = buffer_a_2.await.unwrap();
6268 let buffer_b = buffer_b.await.unwrap();
6269 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6270 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6271
6272 // There is only one buffer per path.
6273 let buffer_a_id = buffer_a_1.id();
6274 assert_eq!(buffer_a_2.id(), buffer_a_id);
6275
6276 // Open the same path again while it is still open.
6277 drop(buffer_a_1);
6278 let buffer_a_3 = project
6279 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6280 .await
6281 .unwrap();
6282
6283 // There's still only one buffer per path.
6284 assert_eq!(buffer_a_3.id(), buffer_a_id);
6285 }
6286
6287 #[gpui::test]
6288 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6289 use std::fs;
6290
6291 let dir = temp_tree(json!({
6292 "file1": "abc",
6293 "file2": "def",
6294 "file3": "ghi",
6295 }));
6296
6297 let project = Project::test(Arc::new(RealFs), cx);
6298 let (worktree, _) = project
6299 .update(cx, |p, cx| {
6300 p.find_or_create_local_worktree(dir.path(), true, cx)
6301 })
6302 .await
6303 .unwrap();
6304 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6305
6306 worktree.flush_fs_events(&cx).await;
6307 worktree
6308 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6309 .await;
6310
6311 let buffer1 = project
6312 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6313 .await
6314 .unwrap();
6315 let events = Rc::new(RefCell::new(Vec::new()));
6316
6317 // initially, the buffer isn't dirty.
6318 buffer1.update(cx, |buffer, cx| {
6319 cx.subscribe(&buffer1, {
6320 let events = events.clone();
6321 move |_, _, event, _| match event {
6322 BufferEvent::Operation(_) => {}
6323 _ => events.borrow_mut().push(event.clone()),
6324 }
6325 })
6326 .detach();
6327
6328 assert!(!buffer.is_dirty());
6329 assert!(events.borrow().is_empty());
6330
6331 buffer.edit(vec![1..2], "", cx);
6332 });
6333
6334 // after the first edit, the buffer is dirty, and emits a dirtied event.
6335 buffer1.update(cx, |buffer, cx| {
6336 assert_eq!(buffer.text(), "ac");
6337 assert!(buffer.is_dirty());
6338 assert_eq!(
6339 *events.borrow(),
6340 &[language::Event::Edited, language::Event::Dirtied]
6341 );
6342 events.borrow_mut().clear();
6343 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6344 });
6345
6346 // after saving, the buffer is not dirty, and emits a saved event.
6347 buffer1.update(cx, |buffer, cx| {
6348 assert!(!buffer.is_dirty());
6349 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6350 events.borrow_mut().clear();
6351
6352 buffer.edit(vec![1..1], "B", cx);
6353 buffer.edit(vec![2..2], "D", cx);
6354 });
6355
6356 // after editing again, the buffer is dirty, and emits another dirtied event.
6357 buffer1.update(cx, |buffer, cx| {
6358 assert_eq!(buffer.text(), "aBDc");
6359 assert!(buffer.is_dirty());
6360 assert_eq!(
6361 *events.borrow(),
6362 &[
6363 language::Event::Edited,
6364 language::Event::Dirtied,
6365 language::Event::Edited,
6366 ],
6367 );
6368 events.borrow_mut().clear();
6369
6370 // TODO - currently, after restoring the buffer to its
6371 // previously-saved state, the buffer is still considered dirty.
6372 buffer.edit([1..3], "", cx);
6373 assert!(buffer.text() == "ac");
6374 assert!(buffer.is_dirty());
6375 });
6376
6377 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6378
6379 // When a file is deleted, the buffer is considered dirty.
6380 let events = Rc::new(RefCell::new(Vec::new()));
6381 let buffer2 = project
6382 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6383 .await
6384 .unwrap();
6385 buffer2.update(cx, |_, cx| {
6386 cx.subscribe(&buffer2, {
6387 let events = events.clone();
6388 move |_, _, event, _| events.borrow_mut().push(event.clone())
6389 })
6390 .detach();
6391 });
6392
6393 fs::remove_file(dir.path().join("file2")).unwrap();
6394 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6395 assert_eq!(
6396 *events.borrow(),
6397 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6398 );
6399
6400 // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6401 let events = Rc::new(RefCell::new(Vec::new()));
6402 let buffer3 = project
6403 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6404 .await
6405 .unwrap();
6406 buffer3.update(cx, |_, cx| {
6407 cx.subscribe(&buffer3, {
6408 let events = events.clone();
6409 move |_, _, event, _| events.borrow_mut().push(event.clone())
6410 })
6411 .detach();
6412 });
6413
6414 worktree.flush_fs_events(&cx).await;
6415 buffer3.update(cx, |buffer, cx| {
6416 buffer.edit(Some(0..0), "x", cx);
6417 });
6418 events.borrow_mut().clear();
6419 fs::remove_file(dir.path().join("file3")).unwrap();
6420 buffer3
6421 .condition(&cx, |_, _| !events.borrow().is_empty())
6422 .await;
6423 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6424 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6425 }
6426
6427 #[gpui::test]
6428 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6429 use std::fs;
6430
6431 let initial_contents = "aaa\nbbbbb\nc\n";
6432 let dir = temp_tree(json!({ "the-file": initial_contents }));
6433
6434 let project = Project::test(Arc::new(RealFs), cx);
6435 let (worktree, _) = project
6436 .update(cx, |p, cx| {
6437 p.find_or_create_local_worktree(dir.path(), true, cx)
6438 })
6439 .await
6440 .unwrap();
6441 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6442
6443 worktree
6444 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6445 .await;
6446
6447 let abs_path = dir.path().join("the-file");
6448 let buffer = project
6449 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6450 .await
6451 .unwrap();
6452
6453 // TODO
6454 // Add a cursor on each row.
6455 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6456 // assert!(!buffer.is_dirty());
6457 // buffer.add_selection_set(
6458 // &(0..3)
6459 // .map(|row| Selection {
6460 // id: row as usize,
6461 // start: Point::new(row, 1),
6462 // end: Point::new(row, 1),
6463 // reversed: false,
6464 // goal: SelectionGoal::None,
6465 // })
6466 // .collect::<Vec<_>>(),
6467 // cx,
6468 // )
6469 // });
6470
6471 // Change the file on disk, adding two new lines of text, and removing
6472 // one line.
6473 buffer.read_with(cx, |buffer, _| {
6474 assert!(!buffer.is_dirty());
6475 assert!(!buffer.has_conflict());
6476 });
6477 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6478 fs::write(&abs_path, new_contents).unwrap();
6479
6480 // Because the buffer was not modified, it is reloaded from disk. Its
6481 // contents are edited according to the diff between the old and new
6482 // file contents.
6483 buffer
6484 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6485 .await;
6486
6487 buffer.update(cx, |buffer, _| {
6488 assert_eq!(buffer.text(), new_contents);
6489 assert!(!buffer.is_dirty());
6490 assert!(!buffer.has_conflict());
6491
6492 // TODO
6493 // let cursor_positions = buffer
6494 // .selection_set(selection_set_id)
6495 // .unwrap()
6496 // .selections::<Point>(&*buffer)
6497 // .map(|selection| {
6498 // assert_eq!(selection.start, selection.end);
6499 // selection.start
6500 // })
6501 // .collect::<Vec<_>>();
6502 // assert_eq!(
6503 // cursor_positions,
6504 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6505 // );
6506 });
6507
6508 // Modify the buffer, making it dirty.
6509 buffer.update(cx, |buffer, cx| {
6510 buffer.edit(vec![0..0], " ", cx);
6511 assert!(buffer.is_dirty());
6512 assert!(!buffer.has_conflict());
6513 });
6514
6515 // Change the file on disk again, adding blank lines to the beginning.
6516 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6517
6518 // Because the buffer is modified, it doesn't reload from disk, but is
6519 // marked as having a conflict.
6520 buffer
6521 .condition(&cx, |buffer, _| buffer.has_conflict())
6522 .await;
6523 }
6524
6525 #[gpui::test]
6526 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6527 cx.foreground().forbid_parking();
6528
6529 let fs = FakeFs::new(cx.background());
6530 fs.insert_tree(
6531 "/the-dir",
6532 json!({
6533 "a.rs": "
6534 fn foo(mut v: Vec<usize>) {
6535 for x in &v {
6536 v.push(1);
6537 }
6538 }
6539 "
6540 .unindent(),
6541 }),
6542 )
6543 .await;
6544
6545 let project = Project::test(fs.clone(), cx);
6546 let (worktree, _) = project
6547 .update(cx, |p, cx| {
6548 p.find_or_create_local_worktree("/the-dir", true, cx)
6549 })
6550 .await
6551 .unwrap();
6552 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6553
6554 let buffer = project
6555 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6556 .await
6557 .unwrap();
6558
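// Simulate a publishDiagnostics message whose hint diagnostics reference their primary
// diagnostics via relatedInformation, so that they can be grouped together.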
6559 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6560 let message = lsp::PublishDiagnosticsParams {
6561 uri: buffer_uri.clone(),
6562 diagnostics: vec![
6563 lsp::Diagnostic {
6564 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6565 severity: Some(DiagnosticSeverity::WARNING),
6566 message: "error 1".to_string(),
6567 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6568 location: lsp::Location {
6569 uri: buffer_uri.clone(),
6570 range: lsp::Range::new(
6571 lsp::Position::new(1, 8),
6572 lsp::Position::new(1, 9),
6573 ),
6574 },
6575 message: "error 1 hint 1".to_string(),
6576 }]),
6577 ..Default::default()
6578 },
6579 lsp::Diagnostic {
6580 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6581 severity: Some(DiagnosticSeverity::HINT),
6582 message: "error 1 hint 1".to_string(),
6583 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6584 location: lsp::Location {
6585 uri: buffer_uri.clone(),
6586 range: lsp::Range::new(
6587 lsp::Position::new(1, 8),
6588 lsp::Position::new(1, 9),
6589 ),
6590 },
6591 message: "original diagnostic".to_string(),
6592 }]),
6593 ..Default::default()
6594 },
6595 lsp::Diagnostic {
6596 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6597 severity: Some(DiagnosticSeverity::ERROR),
6598 message: "error 2".to_string(),
6599 related_information: Some(vec![
6600 lsp::DiagnosticRelatedInformation {
6601 location: lsp::Location {
6602 uri: buffer_uri.clone(),
6603 range: lsp::Range::new(
6604 lsp::Position::new(1, 13),
6605 lsp::Position::new(1, 15),
6606 ),
6607 },
6608 message: "error 2 hint 1".to_string(),
6609 },
6610 lsp::DiagnosticRelatedInformation {
6611 location: lsp::Location {
6612 uri: buffer_uri.clone(),
6613 range: lsp::Range::new(
6614 lsp::Position::new(1, 13),
6615 lsp::Position::new(1, 15),
6616 ),
6617 },
6618 message: "error 2 hint 2".to_string(),
6619 },
6620 ]),
6621 ..Default::default()
6622 },
6623 lsp::Diagnostic {
6624 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6625 severity: Some(DiagnosticSeverity::HINT),
6626 message: "error 2 hint 1".to_string(),
6627 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6628 location: lsp::Location {
6629 uri: buffer_uri.clone(),
6630 range: lsp::Range::new(
6631 lsp::Position::new(2, 8),
6632 lsp::Position::new(2, 17),
6633 ),
6634 },
6635 message: "original diagnostic".to_string(),
6636 }]),
6637 ..Default::default()
6638 },
6639 lsp::Diagnostic {
6640 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6641 severity: Some(DiagnosticSeverity::HINT),
6642 message: "error 2 hint 2".to_string(),
6643 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6644 location: lsp::Location {
6645 uri: buffer_uri.clone(),
6646 range: lsp::Range::new(
6647 lsp::Position::new(2, 8),
6648 lsp::Position::new(2, 17),
6649 ),
6650 },
6651 message: "original diagnostic".to_string(),
6652 }]),
6653 ..Default::default()
6654 },
6655 ],
6656 version: None,
6657 };
6658
6659 project
6660 .update(cx, |p, cx| {
6661 p.update_diagnostics(message, &Default::default(), cx)
6662 })
6663 .unwrap();
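// The diagnostics should be grouped: group 0 contains "error 1" and its hint; group 1
// contains "error 2" and its two hints.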
6664 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6665
6666 assert_eq!(
6667 buffer
6668 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6669 .collect::<Vec<_>>(),
6670 &[
6671 DiagnosticEntry {
6672 range: Point::new(1, 8)..Point::new(1, 9),
6673 diagnostic: Diagnostic {
6674 severity: DiagnosticSeverity::WARNING,
6675 message: "error 1".to_string(),
6676 group_id: 0,
6677 is_primary: true,
6678 ..Default::default()
6679 }
6680 },
6681 DiagnosticEntry {
6682 range: Point::new(1, 8)..Point::new(1, 9),
6683 diagnostic: Diagnostic {
6684 severity: DiagnosticSeverity::HINT,
6685 message: "error 1 hint 1".to_string(),
6686 group_id: 0,
6687 is_primary: false,
6688 ..Default::default()
6689 }
6690 },
6691 DiagnosticEntry {
6692 range: Point::new(1, 13)..Point::new(1, 15),
6693 diagnostic: Diagnostic {
6694 severity: DiagnosticSeverity::HINT,
6695 message: "error 2 hint 1".to_string(),
6696 group_id: 1,
6697 is_primary: false,
6698 ..Default::default()
6699 }
6700 },
6701 DiagnosticEntry {
6702 range: Point::new(1, 13)..Point::new(1, 15),
6703 diagnostic: Diagnostic {
6704 severity: DiagnosticSeverity::HINT,
6705 message: "error 2 hint 2".to_string(),
6706 group_id: 1,
6707 is_primary: false,
6708 ..Default::default()
6709 }
6710 },
6711 DiagnosticEntry {
6712 range: Point::new(2, 8)..Point::new(2, 17),
6713 diagnostic: Diagnostic {
6714 severity: DiagnosticSeverity::ERROR,
6715 message: "error 2".to_string(),
6716 group_id: 1,
6717 is_primary: true,
6718 ..Default::default()
6719 }
6720 }
6721 ]
6722 );
6723
6724 assert_eq!(
6725 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6726 &[
6727 DiagnosticEntry {
6728 range: Point::new(1, 8)..Point::new(1, 9),
6729 diagnostic: Diagnostic {
6730 severity: DiagnosticSeverity::WARNING,
6731 message: "error 1".to_string(),
6732 group_id: 0,
6733 is_primary: true,
6734 ..Default::default()
6735 }
6736 },
6737 DiagnosticEntry {
6738 range: Point::new(1, 8)..Point::new(1, 9),
6739 diagnostic: Diagnostic {
6740 severity: DiagnosticSeverity::HINT,
6741 message: "error 1 hint 1".to_string(),
6742 group_id: 0,
6743 is_primary: false,
6744 ..Default::default()
6745 }
6746 },
6747 ]
6748 );
6749 assert_eq!(
6750 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6751 &[
6752 DiagnosticEntry {
6753 range: Point::new(1, 13)..Point::new(1, 15),
6754 diagnostic: Diagnostic {
6755 severity: DiagnosticSeverity::HINT,
6756 message: "error 2 hint 1".to_string(),
6757 group_id: 1,
6758 is_primary: false,
6759 ..Default::default()
6760 }
6761 },
6762 DiagnosticEntry {
6763 range: Point::new(1, 13)..Point::new(1, 15),
6764 diagnostic: Diagnostic {
6765 severity: DiagnosticSeverity::HINT,
6766 message: "error 2 hint 2".to_string(),
6767 group_id: 1,
6768 is_primary: false,
6769 ..Default::default()
6770 }
6771 },
6772 DiagnosticEntry {
6773 range: Point::new(2, 8)..Point::new(2, 17),
6774 diagnostic: Diagnostic {
6775 severity: DiagnosticSeverity::ERROR,
6776 message: "error 2".to_string(),
6777 group_id: 1,
6778 is_primary: true,
6779 ..Default::default()
6780 }
6781 }
6782 ]
6783 );
6784 }
6785
6786 #[gpui::test]
6787 async fn test_rename(cx: &mut gpui::TestAppContext) {
6788 cx.foreground().forbid_parking();
6789
6790 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6791 let language = Arc::new(Language::new(
6792 LanguageConfig {
6793 name: "Rust".into(),
6794 path_suffixes: vec!["rs".to_string()],
6795 language_server: Some(language_server_config),
6796 ..Default::default()
6797 },
6798 Some(tree_sitter_rust::language()),
6799 ));
6800
6801 let fs = FakeFs::new(cx.background());
6802 fs.insert_tree(
6803 "/dir",
6804 json!({
6805 "one.rs": "const ONE: usize = 1;",
6806 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6807 }),
6808 )
6809 .await;
6810
6811 let project = Project::test(fs.clone(), cx);
6812 project.update(cx, |project, _| {
6813 Arc::get_mut(&mut project.languages).unwrap().add(language);
6814 });
6815
6816 let (tree, _) = project
6817 .update(cx, |project, cx| {
6818 project.find_or_create_local_worktree("/dir", true, cx)
6819 })
6820 .await
6821 .unwrap();
6822 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6823 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6824 .await;
6825
6826 let buffer = project
6827 .update(cx, |project, cx| {
6828 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6829 })
6830 .await
6831 .unwrap();
6832
6833 let mut fake_server = fake_servers.next().await.unwrap();
6834
6835 let response = project.update(cx, |project, cx| {
6836 project.prepare_rename(buffer.clone(), 7, cx)
6837 });
6838 fake_server
6839 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6840 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6841 assert_eq!(params.position, lsp::Position::new(0, 7));
6842 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6843 lsp::Position::new(0, 6),
6844 lsp::Position::new(0, 9),
6845 )))
6846 })
6847 .next()
6848 .await
6849 .unwrap();
6850 let range = response.await.unwrap().unwrap();
6851 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6852 assert_eq!(range, 6..9);
6853
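// Perform the rename; the fake server responds with edits spanning both files.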
6854 let response = project.update(cx, |project, cx| {
6855 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6856 });
6857 fake_server
6858 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
6859 assert_eq!(
6860 params.text_document_position.text_document.uri.as_str(),
6861 "file:///dir/one.rs"
6862 );
6863 assert_eq!(
6864 params.text_document_position.position,
6865 lsp::Position::new(0, 7)
6866 );
6867 assert_eq!(params.new_name, "THREE");
6868 Some(lsp::WorkspaceEdit {
6869 changes: Some(
6870 [
6871 (
6872 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6873 vec![lsp::TextEdit::new(
6874 lsp::Range::new(
6875 lsp::Position::new(0, 6),
6876 lsp::Position::new(0, 9),
6877 ),
6878 "THREE".to_string(),
6879 )],
6880 ),
6881 (
6882 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6883 vec![
6884 lsp::TextEdit::new(
6885 lsp::Range::new(
6886 lsp::Position::new(0, 24),
6887 lsp::Position::new(0, 27),
6888 ),
6889 "THREE".to_string(),
6890 ),
6891 lsp::TextEdit::new(
6892 lsp::Range::new(
6893 lsp::Position::new(0, 35),
6894 lsp::Position::new(0, 38),
6895 ),
6896 "THREE".to_string(),
6897 ),
6898 ],
6899 ),
6900 ]
6901 .into_iter()
6902 .collect(),
6903 ),
6904 ..Default::default()
6905 })
6906 })
6907 .next()
6908 .await
6909 .unwrap();
6910 let mut transaction = response.await.unwrap().0;
6911 assert_eq!(transaction.len(), 2);
6912 assert_eq!(
6913 transaction
6914 .remove_entry(&buffer)
6915 .unwrap()
6916 .0
6917 .read_with(cx, |buffer, _| buffer.text()),
6918 "const THREE: usize = 1;"
6919 );
6920 assert_eq!(
6921 transaction
6922 .into_keys()
6923 .next()
6924 .unwrap()
6925 .read_with(cx, |buffer, _| buffer.text()),
6926 "const TWO: usize = one::THREE + one::THREE;"
6927 );
6928 }
6929
6930 #[gpui::test]
6931 async fn test_search(cx: &mut gpui::TestAppContext) {
6932 let fs = FakeFs::new(cx.background());
6933 fs.insert_tree(
6934 "/dir",
6935 json!({
6936 "one.rs": "const ONE: usize = 1;",
6937 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6938 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6939 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6940 }),
6941 )
6942 .await;
6943 let project = Project::test(fs.clone(), cx);
6944 let (tree, _) = project
6945 .update(cx, |project, cx| {
6946 project.find_or_create_local_worktree("/dir", true, cx)
6947 })
6948 .await
6949 .unwrap();
6950 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6951 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6952 .await;
6953
6954 assert_eq!(
6955 search(&project, SearchQuery::text("TWO", false, true), cx)
6956 .await
6957 .unwrap(),
6958 HashMap::from_iter([
6959 ("two.rs".to_string(), vec![6..9]),
6960 ("three.rs".to_string(), vec![37..40])
6961 ])
6962 );
6963
6964 let buffer_4 = project
6965 .update(cx, |project, cx| {
6966 project.open_buffer((worktree_id, "four.rs"), cx)
6967 })
6968 .await
6969 .unwrap();
6970 buffer_4.update(cx, |buffer, cx| {
6971 buffer.edit([20..28, 31..43], "two::TWO", cx);
6972 });
6973
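// The results should now also reflect the unsaved edits made to the open buffer for `four.rs`.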
6974 assert_eq!(
6975 search(&project, SearchQuery::text("TWO", false, true), cx)
6976 .await
6977 .unwrap(),
6978 HashMap::from_iter([
6979 ("two.rs".to_string(), vec![6..9]),
6980 ("three.rs".to_string(), vec![37..40]),
6981 ("four.rs".to_string(), vec![25..28, 36..39])
6982 ])
6983 );
6984
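// Runs a project-wide search and flattens the results into a map from file path to
// match offset ranges, for easier assertions.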
6985 async fn search(
6986 project: &ModelHandle<Project>,
6987 query: SearchQuery,
6988 cx: &mut gpui::TestAppContext,
6989 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6990 let results = project
6991 .update(cx, |project, cx| project.search(query, cx))
6992 .await?;
6993
6994 Ok(results
6995 .into_iter()
6996 .map(|(buffer, ranges)| {
6997 buffer.read_with(cx, |buffer, _| {
6998 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6999 let ranges = ranges
7000 .into_iter()
7001 .map(|range| range.to_offset(buffer))
7002 .collect::<Vec<_>>();
7003 (path, ranges)
7004 })
7005 })
7006 .collect())
7007 }
7008 }
7009}