1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch, PointUtf16,
22 TextBufferSnapshot, ToLspPosition, ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
/// An entity that can be opened from a project, keyed by the worktree entry
/// it was opened from (when it has one).
pub trait Item: Entity {
    /// The id of the project entry this item corresponds to, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
55
/// The central model of this crate: a set of worktrees together with the
/// buffers, language servers, and collaboration state layered on top of them.
pub struct Project {
    // Worktrees are held strongly or weakly depending on sharing/visibility
    // (see `share`/`unshare`).
    worktrees: Vec<WorktreeHandle>,
    // The most recently activated project entry, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and server name.
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    // In-flight server startups, so a server is only launched once per key.
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    // Status per server id, mirrored from the host for remote projects.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter used by `ProjectEntryId::new`.
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Local (host) vs. remote (guest) state; see `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Incremented/decremented as servers report disk-based diagnostics;
    // signed so transient imbalances don't underflow.
    language_servers_with_diagnostics_running: isize,
    // Notifies watchers whenever any buffer finishes opening.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids that have been sent to each connected peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // De-duplicates concurrent `open_buffer` calls for the same path.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // De-duplicates concurrent local-worktree creation for the same path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All buffers by remote id; strong, weak, or still loading.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Historical snapshots per buffer, keyed by version, for LSP bookkeeping.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value generated at construction (see `StdRng::from_entropy`).
    nonce: u128,
}
87
/// How the project is currently holding on to an open buffer.
enum OpenBuffer {
    // Kept alive by the project (used while shared or remote).
    Strong(ModelHandle<Buffer>),
    // Dropped when no one else references it.
    Weak(WeakModelHandle<Buffer>),
    // Not yet created; operations received so far are buffered here and
    // applied in `register_buffer`.
    Loading(Vec<Operation>),
}
93
/// Strong or weak reference to a worktree; weak handles are used for
/// invisible, unshared worktrees so they can be dropped when unused.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
98
/// Whether this instance is the host of the project (`Local`) or a guest
/// joined over RPC (`Remote`).
enum ProjectClientState {
    Local {
        // True while collaborators may be connected.
        is_shared: bool,
        // The server-assigned project id, set on registration.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Re-registers/unregisters the project as the connection status changes.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set once the host stops sharing or we disconnect; makes the
        // project read-only (see `is_read_only`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Watches connection status and flags `sharing_has_stopped`.
        _detect_unshare_task: Task<Option<()>>,
    },
}
113
/// Another participant in a shared project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
120
/// Events emitted by a `Project` model for the UI and other observers.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
132
/// Internal events produced while listening to a language server,
/// mirroring LSP progress notifications and published diagnostics.
enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}
146
/// Public status of a single language server instance.
pub struct LanguageServerStatus {
    pub name: String,
    // In-flight progress items keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Signed counter of outstanding diagnostic updates for this server.
    pending_diagnostic_updates: isize,
}
152
/// A single unit of reported language-server progress.
#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    // Percent complete, when the server reports one.
    pub percentage: Option<usize>,
    pub last_update_at: Instant,
}
159
/// A path to a file, addressed relative to one of the project's worktrees.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
165
/// Counts of primary diagnostics by severity (see `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
173
/// An anchored range within a specific buffer.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
179
/// A highlighted range returned by an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
185
/// A workspace symbol reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced the symbol.
    pub source_worktree_id: WorktreeId,
    // Worktree that actually contains the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte signature — presumably a SHA-256 digest given the `sha2`
    // import; TODO(review) confirm where it is computed.
    pub signature: [u8; 32],
}
198
/// A set of buffer transactions resulting from one project-wide operation
/// (e.g. applying a code action), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
201
202impl DiagnosticSummary {
203 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
204 let mut this = Self {
205 error_count: 0,
206 warning_count: 0,
207 info_count: 0,
208 hint_count: 0,
209 };
210
211 for entry in diagnostics {
212 if entry.diagnostic.is_primary {
213 match entry.diagnostic.severity {
214 DiagnosticSeverity::ERROR => this.error_count += 1,
215 DiagnosticSeverity::WARNING => this.warning_count += 1,
216 DiagnosticSeverity::INFORMATION => this.info_count += 1,
217 DiagnosticSeverity::HINT => this.hint_count += 1,
218 _ => {}
219 }
220 }
221 }
222
223 this
224 }
225
226 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
227 proto::DiagnosticSummary {
228 path: path.to_string_lossy().to_string(),
229 error_count: self.error_count as u32,
230 warning_count: self.warning_count as u32,
231 info_count: self.info_count as u32,
232 hint_count: self.hint_count as u32,
233 }
234 }
235}
236
/// A unique, project-wide identifier for a worktree entry.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Allocates the next id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        ProjectEntryId(counter.fetch_add(1, SeqCst))
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// The wire representation of this id.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// The raw value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
257
258impl Project {
    /// Registers every RPC handler that `Project` models respond to.
    /// Must be called once at startup, before any project is created.
    pub fn init(client: &Arc<Client>) {
        // One-way messages pushed by the server or by collaborators.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Requests that expect a response from this project.
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, parameterized by command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
290
    /// Creates a new, empty local (host-side) project.
    ///
    /// Spawns a background task that registers the project with the server
    /// whenever the client connects and unregisters it when it disconnects.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Keep the server-side registration in sync with connection status.
            // Holds only a weak handle so it doesn't keep the project alive.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random nonce generated once per project instance.
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
353
    /// Joins an existing project as a guest.
    ///
    /// Connects and authenticates, requests to join `remote_id`, reconstructs
    /// the host's worktrees and language-server statuses from the response,
    /// and loads the collaborators' user records before returning the model.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Build remote worktree models; their load tasks run in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                // Route incoming messages for this remote project to this model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Flag the project as unshared if we ever disconnect.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the user records for all collaborators before exposing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
465
    /// Builds a local project suitable for tests: an empty language registry,
    /// a fake HTTP client that returns 404s, and the provided `fs`.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
474
475 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
476 self.opened_buffers
477 .get(&remote_id)
478 .and_then(|buffer| buffer.upgrade(cx))
479 }
480
    /// The language registry backing this project (test-only accessor).
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
485
    /// Test-only sanity checks: local projects must not contain two worktrees
    /// with the same absolute path; remote projects must have no buffers with
    /// deferred (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            // Each worktree's absolute path must be unique within the project.
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            // On a guest, every live buffer should have applied all operations.
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
519
520 #[cfg(any(test, feature = "test-support"))]
521 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
522 let path = path.into();
523 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
524 self.opened_buffers.iter().any(|(_, buffer)| {
525 if let Some(buffer) = buffer.upgrade(cx) {
526 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
527 if file.worktree == worktree && file.path() == &path.path {
528 return true;
529 }
530 }
531 }
532 false
533 })
534 } else {
535 false
536 }
537 }
538
    /// The filesystem implementation this project operates on.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
542
    /// Removes this local project from the server: stops sharing, unregisters
    /// each local worktree, clears the stored remote id, and drops RPC
    /// subscriptions. Called when the client disconnects (see `local`).
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    // Only local projects are registered, so this must be local.
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
559
    /// Registers this local project with the server, obtaining a fresh remote
    /// id and registering every live worktree under it. Any previous
    /// registration is torn down first.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                // Route messages addressed to this remote id to this model.
                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // Fail if any individual worktree registration fails.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
592
593 pub fn remote_id(&self) -> Option<u64> {
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
596 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
597 }
598 }
599
    /// Resolves to the project's remote id, waiting if necessary.
    ///
    /// For remote projects this completes immediately; for local projects it
    /// watches the registration channel until an id is assigned.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // `watch` is always Some here: exactly one of `id`/`watch` is set above.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
622
623 pub fn replica_id(&self) -> ReplicaId {
624 match &self.client_state {
625 ProjectClientState::Local { .. } => 0,
626 ProjectClientState::Remote { replica_id, .. } => *replica_id,
627 }
628 }
629
    /// The currently connected collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
633
    /// Iterates over all live worktrees, silently skipping any weak handles
    /// whose worktree has already been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
642
643 pub fn visible_worktrees<'a>(
644 &'a self,
645 cx: &'a AppContext,
646 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
647 self.worktrees.iter().filter_map(|worktree| {
648 worktree.upgrade(cx).and_then(|worktree| {
649 if worktree.read(cx).is_visible() {
650 Some(worktree)
651 } else {
652 None
653 }
654 })
655 })
656 }
657
658 pub fn worktree_for_id(
659 &self,
660 id: WorktreeId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).id() == id)
665 }
666
667 pub fn worktree_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<ModelHandle<Worktree>> {
672 self.worktrees(cx)
673 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
674 }
675
676 pub fn worktree_id_for_entry(
677 &self,
678 entry_id: ProjectEntryId,
679 cx: &AppContext,
680 ) -> Option<WorktreeId> {
681 self.worktree_for_entry(entry_id, cx)
682 .map(|worktree| worktree.read(cx).id())
683 }
684
    /// Starts sharing this local project with collaborators.
    ///
    /// Upgrades all buffer and worktree handles to strong ones (so they stay
    /// alive for guests), tells the server the project is shared, then shares
    /// each worktree. Fails if the project is remote or not yet registered.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Hold all open buffers strongly while the project is shared.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // Loading entries only exist on guests, never here.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise keep every worktree alive for the guests.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    // Sharing requires the project to already be registered.
                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
746
747 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
748 let rpc = self.client.clone();
749
750 if let ProjectClientState::Local {
751 is_shared,
752 remote_id_rx,
753 ..
754 } = &mut self.client_state
755 {
756 if !*is_shared {
757 return;
758 }
759
760 *is_shared = false;
761 self.collaborators.clear();
762 self.shared_buffers.clear();
763 for worktree_handle in self.worktrees.iter_mut() {
764 if let WorktreeHandle::Strong(worktree) = worktree_handle {
765 let is_visible = worktree.update(cx, |worktree, _| {
766 worktree.as_local_mut().unwrap().unshare();
767 worktree.is_visible()
768 });
769 if !is_visible {
770 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
771 }
772 }
773 }
774
775 for open_buffer in self.opened_buffers.values_mut() {
776 match open_buffer {
777 OpenBuffer::Strong(buffer) => {
778 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
779 }
780 _ => {}
781 }
782 }
783
784 if let Some(project_id) = *remote_id_rx.borrow() {
785 rpc.send(proto::UnshareProject { project_id }).log_err();
786 }
787
788 cx.notify();
789 } else {
790 log::error!("attempted to unshare a remote project");
791 }
792 }
793
    /// Marks a remote project as no longer shared by its host (or as
    /// disconnected), clearing collaborators. Local projects are unaffected.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
805
806 pub fn is_read_only(&self) -> bool {
807 match &self.client_state {
808 ProjectClientState::Local { .. } => false,
809 ProjectClientState::Remote {
810 sharing_has_stopped,
811 ..
812 } => *sharing_has_stopped,
813 }
814 }
815
816 pub fn is_local(&self) -> bool {
817 match &self.client_state {
818 ProjectClientState::Local { .. } => true,
819 ProjectClientState::Remote { .. } => false,
820 }
821 }
822
    /// Whether this instance is a guest joined to someone else's project.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
826
    /// Creates a new, empty plain-text buffer and registers it with the
    /// project. Only supported on the host; guests get an error.
    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
838
    /// Opens the buffer at `path` and returns it as a type-erased item handle
    /// together with its project entry id. Fails if the buffer's file has no
    /// project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
855
    /// Opens (or returns the already-open) buffer for the given project path.
    ///
    /// Concurrent calls for the same path are de-duplicated through
    /// `loading_buffers`: the first call starts the load, and every caller
    /// awaits the same watch channel for the result.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones ask the host.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Await the (possibly shared) load result.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        // The stored error is shared; re-wrap it for this caller.
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
916
    /// Loads a buffer from disk via the local worktree and registers it with
    /// the project once loading completes.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            // Callers only reach this for local worktrees (see `open_buffer`).
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
933
    /// Requests a buffer from the host over RPC and deserializes the
    /// response into a local buffer model.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Remote projects always have a remote id, so this can't panic here.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
958
    /// Opens the file behind a `file://` URI produced by a language server.
    ///
    /// If the path falls outside every existing worktree, an invisible
    /// worktree is created for it and associated with the given language
    /// server so the file still gets LSP support.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lsp_adapter: Arc<dyn LspAdapter>,
        lsp_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create an invisible one
                // rooted at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers.insert(
                        (worktree.read(cx).id(), lsp_adapter.name()),
                        (lsp_adapter, lsp_server),
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
997
    /// Opens a buffer by its remote id.
    ///
    /// Returns the existing model if already open; otherwise guests request
    /// the buffer from the host, while on the host an unknown id is an error.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1023
    /// Saves `buffer` to a new absolute path, creating (or reusing) a local
    /// worktree containing that path, then re-resolves the buffer's language
    /// and language-server registration for its new file.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file (and possibly its extension) changed, so the buffer's
            // language and server association may change too.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1048
1049 pub fn get_open_buffer(
1050 &mut self,
1051 path: &ProjectPath,
1052 cx: &mut ModelContext<Self>,
1053 ) -> Option<ModelHandle<Buffer>> {
1054 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1055 self.opened_buffers.values().find_map(|buffer| {
1056 let buffer = buffer.upgrade(cx)?;
1057 let file = File::from_dyn(buffer.read(cx).file())?;
1058 if file.worktree == worktree && file.path() == &path.path {
1059 Some(buffer)
1060 } else {
1061 None
1062 }
1063 })
1064 }
1065
    /// Records a newly created buffer in `opened_buffers`, applies any
    /// operations that arrived while it was loading, subscribes to its
    /// events, and wires up its language and language server.
    ///
    /// Errors if a live buffer with the same remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Hold the buffer strongly only while it must outlive local interest
        // (guest side, or host while sharing).
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that raced ahead of the buffer are applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // A dead weak handle may be replaced; a live one is a bug.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
1106
1107 fn register_buffer_with_language_server(
1108 &mut self,
1109 buffer_handle: &ModelHandle<Buffer>,
1110 cx: &mut ModelContext<Self>,
1111 ) {
1112 let buffer = buffer_handle.read(cx);
1113 let buffer_id = buffer.remote_id();
1114 if let Some(file) = File::from_dyn(buffer.file()) {
1115 if file.is_local() {
1116 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1117 let initial_snapshot = buffer.text_snapshot();
1118 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1119
1120 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1121 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1122 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1123 .log_err();
1124 }
1125 }
1126
1127 if let Some((_, server)) = language_server {
1128 server
1129 .notify::<lsp::notification::DidOpenTextDocument>(
1130 lsp::DidOpenTextDocumentParams {
1131 text_document: lsp::TextDocumentItem::new(
1132 uri,
1133 Default::default(),
1134 0,
1135 initial_snapshot.text(),
1136 ),
1137 }
1138 .clone(),
1139 )
1140 .log_err();
1141 buffer_handle.update(cx, |buffer, cx| {
1142 buffer.set_completion_triggers(
1143 server
1144 .capabilities()
1145 .completion_provider
1146 .as_ref()
1147 .and_then(|provider| provider.trigger_characters.clone())
1148 .unwrap_or(Vec::new()),
1149 cx,
1150 )
1151 });
1152 self.buffer_snapshots
1153 .insert(buffer_id, vec![(0, initial_snapshot)]);
1154 }
1155
1156 cx.observe_release(buffer_handle, |this, buffer, cx| {
1157 if let Some(file) = File::from_dyn(buffer.file()) {
1158 if file.is_local() {
1159 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1160 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1161 server
1162 .notify::<lsp::notification::DidCloseTextDocument>(
1163 lsp::DidCloseTextDocumentParams {
1164 text_document: lsp::TextDocumentIdentifier::new(
1165 uri.clone(),
1166 ),
1167 },
1168 )
1169 .log_err();
1170 }
1171 }
1172 }
1173 })
1174 .detach();
1175 }
1176 }
1177 }
1178
    /// Reacts to a single buffer event by forwarding it to collaborators
    /// and/or the buffer's language server.
    ///
    /// The `Option<()>` return exists solely so the body can use `?` for early
    /// exit; callers ignore the value.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Only relevant when the project is shared: forward the CRDT
                // operation to the server in the background.
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                // Send an incremental `textDocument/didChange` based on the
                // edits since the last snapshot reported to the server.
                let (_, language_server) = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // The LSP range covers the replaced *old* text: it
                        // starts at the edit's position in the new snapshot
                        // and spans the old text's length.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot *before* notifying, so later edits diff
                // against exactly what the server was told.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                // Notify every language server in the file's worktree that the
                // buffer was saved to disk.
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1265
1266 fn language_servers_for_worktree(
1267 &self,
1268 worktree_id: WorktreeId,
1269 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1270 self.language_servers.iter().filter_map(
1271 move |((language_server_worktree_id, _), server)| {
1272 if *language_server_worktree_id == worktree_id {
1273 Some(server)
1274 } else {
1275 None
1276 }
1277 },
1278 )
1279 }
1280
1281 fn assign_language_to_buffer(
1282 &mut self,
1283 buffer: &ModelHandle<Buffer>,
1284 cx: &mut ModelContext<Self>,
1285 ) -> Option<()> {
1286 // If the buffer has a language, set it and start the language server if we haven't already.
1287 let full_path = buffer.read(cx).file()?.full_path(cx);
1288 let language = self.languages.select_language(&full_path)?;
1289 buffer.update(cx, |buffer, cx| {
1290 buffer.set_language(Some(language.clone()), cx);
1291 });
1292
1293 let file = File::from_dyn(buffer.read(cx).file())?;
1294 let worktree = file.worktree.read(cx).as_local()?;
1295 let worktree_id = worktree.id();
1296 let worktree_abs_path = worktree.abs_path().clone();
1297 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1298
1299 None
1300 }
1301
    /// Starts the language server for `language` in the given worktree, at
    /// most once per `(worktree, adapter-name)` key.
    ///
    /// The heavy lifting happens in a detached task stored in
    /// `started_language_servers`, which:
    /// 1. waits for the server binary to start,
    /// 2. routes its notifications (diagnostics, progress) into an internal
    ///    event channel consumed by `on_lsp_event`,
    /// 3. answers `workspace/configuration` requests from stored settings,
    /// 4. initializes the server, records its status, announces it to
    ///    collaborators, and
    /// 5. sends `didOpen` for every already-open buffer that matches this
    ///    worktree and language.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let adapter = if let Some(adapter) = language.lsp_adapter() {
            adapter
        } else {
            // Languages without an LSP adapter never get a server.
            return;
        };
        let key = (worktree_id, adapter.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    // Forward diagnostics notifications into the event channel.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    // Serve `workspace/configuration` requests from the
                    // project's stored language-server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params| {
                                let settings = settings.lock();
                                Ok(params
                                    .items
                                    .into_iter()
                                    .map(|item| {
                                        if let Some(section) = &item.section {
                                            settings
                                                .get(section)
                                                .cloned()
                                                .unwrap_or(serde_json::Value::Null)
                                        } else {
                                            settings.clone()
                                        }
                                    })
                                    .collect())
                            }
                        })
                        .detach();

                    // Translate `$/progress` notifications into work
                    // start/progress/end events. Only string tokens are
                    // supported; numeric ones are logged and dropped.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                    last_update_at: Instant::now(),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });

                                // Don't starve the main thread when lots of events arrive all at once.
                                smol::future::yield_now().await;
                            }
                            Some(())
                        }
                    })
                    .detach();

                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), (adapter, language_server.clone()));
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings immediately after startup.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Announce the new server to collaborators, if shared.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if file.worktree.read(cx).id() != key.0
                                    || language.lsp_adapter().map(|a| a.name())
                                        != Some(key.1.clone())
                                {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Reuse the recorded snapshot/version if one
                                // exists; otherwise start at version 0.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1522
1523 pub fn restart_language_servers_for_buffers(
1524 &mut self,
1525 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1526 cx: &mut ModelContext<Self>,
1527 ) -> Option<()> {
1528 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1529 .into_iter()
1530 .filter_map(|buffer| {
1531 let file = File::from_dyn(buffer.read(cx).file())?;
1532 let worktree = file.worktree.read(cx).as_local()?;
1533 let worktree_id = worktree.id();
1534 let worktree_abs_path = worktree.abs_path().clone();
1535 let full_path = file.full_path(cx);
1536 Some((worktree_id, worktree_abs_path, full_path))
1537 })
1538 .collect();
1539 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1540 let language = self.languages.select_language(&full_path)?;
1541 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1542 }
1543
1544 None
1545 }
1546
1547 fn restart_language_server(
1548 &mut self,
1549 worktree_id: WorktreeId,
1550 worktree_path: Arc<Path>,
1551 language: Arc<Language>,
1552 cx: &mut ModelContext<Self>,
1553 ) {
1554 let adapter = if let Some(adapter) = language.lsp_adapter() {
1555 adapter
1556 } else {
1557 return;
1558 };
1559 let key = (worktree_id, adapter.name());
1560 let server_to_shutdown = self.language_servers.remove(&key);
1561 self.started_language_servers.remove(&key);
1562 server_to_shutdown
1563 .as_ref()
1564 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1565 cx.spawn_weak(|this, mut cx| async move {
1566 if let Some(this) = this.upgrade(&cx) {
1567 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1568 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1569 shutdown_task.await;
1570 }
1571 }
1572
1573 this.update(&mut cx, |this, cx| {
1574 this.start_language_server(worktree_id, worktree_path, language, cx);
1575 });
1576 }
1577 })
1578 .detach();
1579 }
1580
    /// Dispatches a single event received from a language server.
    ///
    /// Progress events whose token matches the language's disk-based
    /// diagnostics token drive the "disk-based diagnostics updating/updated"
    /// lifecycle (ref-counted via `pending_diagnostic_updates`); all other
    /// tokens update the per-server pending-work status. Diagnostics events
    /// are folded into the project's diagnostic state. Relevant changes are
    /// also broadcast to collaborators.
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                // Unknown server (e.g. already shut down) — drop the event.
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(token.as_str()) == disk_diagnostics_token {
                    // Overlapping disk-based diagnostic runs are ref-counted;
                    // only the first triggers the "started" notifications.
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                // Progress for the disk-based diagnostics token is ignored;
                // only start/end matter for that lifecycle.
                if Some(token.as_str()) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(token.as_str()) == disk_diagnostics_token {
                    // Counterpart of WorkStart above: only the last matching
                    // end triggers the "finished" notifications.
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                language.process_diagnostics(&mut params);

                // Servers without a dedicated disk-based diagnostics progress
                // token are treated as if every diagnostics push were a full
                // disk-based update.
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(params, language.disk_based_diagnostic_sources(), cx)
                    .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }
1687
1688 fn on_lsp_work_start(
1689 &mut self,
1690 language_server_id: usize,
1691 token: String,
1692 cx: &mut ModelContext<Self>,
1693 ) {
1694 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1695 status.pending_work.insert(
1696 token,
1697 LanguageServerProgress {
1698 message: None,
1699 percentage: None,
1700 last_update_at: Instant::now(),
1701 },
1702 );
1703 cx.notify();
1704 }
1705 }
1706
1707 fn on_lsp_work_progress(
1708 &mut self,
1709 language_server_id: usize,
1710 token: String,
1711 progress: LanguageServerProgress,
1712 cx: &mut ModelContext<Self>,
1713 ) {
1714 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1715 status.pending_work.insert(token, progress);
1716 cx.notify();
1717 }
1718 }
1719
1720 fn on_lsp_work_end(
1721 &mut self,
1722 language_server_id: usize,
1723 token: String,
1724 cx: &mut ModelContext<Self>,
1725 ) {
1726 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1727 status.pending_work.remove(&token);
1728 cx.notify();
1729 }
1730 }
1731
1732 fn broadcast_language_server_update(
1733 &self,
1734 language_server_id: usize,
1735 event: proto::update_language_server::Variant,
1736 ) {
1737 if let Some(project_id) = self.remote_id() {
1738 self.client
1739 .send(proto::UpdateLanguageServer {
1740 project_id,
1741 language_server_id: language_server_id as u64,
1742 variant: Some(event),
1743 })
1744 .log_err();
1745 }
1746 }
1747
1748 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1749 for (_, server) in self.language_servers.values() {
1750 server
1751 .notify::<lsp::notification::DidChangeConfiguration>(
1752 lsp::DidChangeConfigurationParams {
1753 settings: settings.clone(),
1754 },
1755 )
1756 .ok();
1757 }
1758 *self.language_server_settings.lock() = settings;
1759 }
1760
1761 pub fn language_server_statuses(
1762 &self,
1763 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1764 self.language_server_statuses.values()
1765 }
1766
1767 pub fn update_diagnostics(
1768 &mut self,
1769 params: lsp::PublishDiagnosticsParams,
1770 disk_based_sources: &[&str],
1771 cx: &mut ModelContext<Self>,
1772 ) -> Result<()> {
1773 let abs_path = params
1774 .uri
1775 .to_file_path()
1776 .map_err(|_| anyhow!("URI is not a file"))?;
1777 let mut next_group_id = 0;
1778 let mut diagnostics = Vec::default();
1779 let mut primary_diagnostic_group_ids = HashMap::default();
1780 let mut sources_by_group_id = HashMap::default();
1781 let mut supporting_diagnostics = HashMap::default();
1782 for diagnostic in ¶ms.diagnostics {
1783 let source = diagnostic.source.as_ref();
1784 let code = diagnostic.code.as_ref().map(|code| match code {
1785 lsp::NumberOrString::Number(code) => code.to_string(),
1786 lsp::NumberOrString::String(code) => code.clone(),
1787 });
1788 let range = range_from_lsp(diagnostic.range);
1789 let is_supporting = diagnostic
1790 .related_information
1791 .as_ref()
1792 .map_or(false, |infos| {
1793 infos.iter().any(|info| {
1794 primary_diagnostic_group_ids.contains_key(&(
1795 source,
1796 code.clone(),
1797 range_from_lsp(info.location.range),
1798 ))
1799 })
1800 });
1801
1802 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1803 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1804 });
1805
1806 if is_supporting {
1807 supporting_diagnostics.insert(
1808 (source, code.clone(), range),
1809 (diagnostic.severity, is_unnecessary),
1810 );
1811 } else {
1812 let group_id = post_inc(&mut next_group_id);
1813 let is_disk_based = source.map_or(false, |source| {
1814 disk_based_sources.contains(&source.as_str())
1815 });
1816
1817 sources_by_group_id.insert(group_id, source);
1818 primary_diagnostic_group_ids
1819 .insert((source, code.clone(), range.clone()), group_id);
1820
1821 diagnostics.push(DiagnosticEntry {
1822 range,
1823 diagnostic: Diagnostic {
1824 code: code.clone(),
1825 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1826 message: diagnostic.message.clone(),
1827 group_id,
1828 is_primary: true,
1829 is_valid: true,
1830 is_disk_based,
1831 is_unnecessary,
1832 },
1833 });
1834 if let Some(infos) = &diagnostic.related_information {
1835 for info in infos {
1836 if info.location.uri == params.uri && !info.message.is_empty() {
1837 let range = range_from_lsp(info.location.range);
1838 diagnostics.push(DiagnosticEntry {
1839 range,
1840 diagnostic: Diagnostic {
1841 code: code.clone(),
1842 severity: DiagnosticSeverity::INFORMATION,
1843 message: info.message.clone(),
1844 group_id,
1845 is_primary: false,
1846 is_valid: true,
1847 is_disk_based,
1848 is_unnecessary: false,
1849 },
1850 });
1851 }
1852 }
1853 }
1854 }
1855 }
1856
1857 for entry in &mut diagnostics {
1858 let diagnostic = &mut entry.diagnostic;
1859 if !diagnostic.is_primary {
1860 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1861 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1862 source,
1863 diagnostic.code.clone(),
1864 entry.range.clone(),
1865 )) {
1866 if let Some(severity) = severity {
1867 diagnostic.severity = severity;
1868 }
1869 diagnostic.is_unnecessary = is_unnecessary;
1870 }
1871 }
1872 }
1873
1874 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1875 Ok(())
1876 }
1877
1878 pub fn update_diagnostic_entries(
1879 &mut self,
1880 abs_path: PathBuf,
1881 version: Option<i32>,
1882 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1883 cx: &mut ModelContext<Project>,
1884 ) -> Result<(), anyhow::Error> {
1885 let (worktree, relative_path) = self
1886 .find_local_worktree(&abs_path, cx)
1887 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1888 if !worktree.read(cx).is_visible() {
1889 return Ok(());
1890 }
1891
1892 let project_path = ProjectPath {
1893 worktree_id: worktree.read(cx).id(),
1894 path: relative_path.into(),
1895 };
1896
1897 for buffer in self.opened_buffers.values() {
1898 if let Some(buffer) = buffer.upgrade(cx) {
1899 if buffer
1900 .read(cx)
1901 .file()
1902 .map_or(false, |file| *file.path() == project_path.path)
1903 {
1904 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1905 break;
1906 }
1907 }
1908 }
1909 worktree.update(cx, |worktree, cx| {
1910 worktree
1911 .as_local_mut()
1912 .ok_or_else(|| anyhow!("not a local worktree"))?
1913 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1914 })?;
1915 cx.emit(Event::DiagnosticsUpdated(project_path));
1916 Ok(())
1917 }
1918
    /// Applies `diagnostics` — expressed in coordinates of the buffer snapshot
    /// identified by the LSP `version` — to an open buffer.
    ///
    /// Entries are sorted, disk-based entries are mapped through unsaved edits
    /// into the current contents, all positions are clipped to valid points,
    /// and empty ranges are widened to cover one character so they remain
    /// visible.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics that share a range: primaries
        // first, then by disk-based flag, severity, and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Snapshot of the buffer at the LSP version these diagnostics refer to.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                // At end-of-line, extend backwards instead.
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1987
    /// Formats the given buffers, returning one transaction per edited buffer.
    ///
    /// Remote buffers are formatted by asking the host over RPC; local buffers
    /// are formatted by their language server via `textDocument/formatting`
    /// (falling back to `rangeFormatting` over the whole document when only
    /// that is supported). When `push_to_history` is false, the resulting
    /// transactions are removed from each buffer's undo history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the buffers: local files (with a language server) are
        // formatted here, remote files are batched into one host request.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // A buffer with no file at all can't be formatted.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to format the remote buffers, then replay the
            // returned transaction locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: lsp::FormattingOptions {
                                tab_size: 4,
                                insert_spaces: true,
                                insert_final_newline: Some(true),
                                ..Default::default()
                            },
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    // Fall back to range-formatting the entire document.
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: lsp::FormattingOptions {
                                    tab_size: 4,
                                    insert_spaces: true,
                                    insert_final_newline: Some(true),
                                    ..Default::default()
                                },
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    // Server supports neither form of formatting.
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into a single transaction
                        // so they undo/redo as one step.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2112
2113 pub fn definition<T: ToPointUtf16>(
2114 &self,
2115 buffer: &ModelHandle<Buffer>,
2116 position: T,
2117 cx: &mut ModelContext<Self>,
2118 ) -> Task<Result<Vec<Location>>> {
2119 let position = position.to_point_utf16(buffer.read(cx));
2120 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2121 }
2122
2123 pub fn references<T: ToPointUtf16>(
2124 &self,
2125 buffer: &ModelHandle<Buffer>,
2126 position: T,
2127 cx: &mut ModelContext<Self>,
2128 ) -> Task<Result<Vec<Location>>> {
2129 let position = position.to_point_utf16(buffer.read(cx));
2130 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2131 }
2132
2133 pub fn document_highlights<T: ToPointUtf16>(
2134 &self,
2135 buffer: &ModelHandle<Buffer>,
2136 position: T,
2137 cx: &mut ModelContext<Self>,
2138 ) -> Task<Result<Vec<DocumentHighlight>>> {
2139 let position = position.to_point_utf16(buffer.read(cx));
2140
2141 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2142 }
2143
    /// Fetches workspace symbols matching `query` from every language server
    /// in the project (local case) or from the remote host (remote case).
    /// Returns an empty list when the project is neither local nor connected.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers by pointer identity: the same server
            // instance may be registered under several (worktree, name) keys,
            // but we only want to query each one once.
            let mut language_servers = HashMap::default();
            for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
                if let Some(worktree) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            lsp_adapter.clone(),
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                        ));
                }
            }

            // Issue a `workspace/symbol` request to each unique server.
            let mut requests = Vec::new();
            for (_, language_server, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // `try_join_all` preserves request order, and the
                        // unmodified map iterates in the same order as when
                        // the requests were built, so zipping pairs each
                        // response with its server's metadata.
                        for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer attributing the symbol to a local
                                    // worktree that contains it; otherwise keep
                                    // the originating server's worktree and
                                    // compute a relative path from its root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    // Use a language-specific label when the
                                    // file's language is known, else plain text.
                                    let label = this
                                        .languages
                                        .select_language(&path)
                                        .and_then(|language| {
                                            language
                                                .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        })
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_server_name: adapter.name(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward the query to the host over RPC and
            // deserialize the symbols it returns, logging (and skipping) any
            // that fail to deserialize.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2251
2252 pub fn open_buffer_for_symbol(
2253 &mut self,
2254 symbol: &Symbol,
2255 cx: &mut ModelContext<Self>,
2256 ) -> Task<Result<ModelHandle<Buffer>>> {
2257 if self.is_local() {
2258 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2259 symbol.source_worktree_id,
2260 symbol.language_server_name.clone(),
2261 )) {
2262 server.clone()
2263 } else {
2264 return Task::ready(Err(anyhow!(
2265 "language server for worktree and language not found"
2266 )));
2267 };
2268
2269 let worktree_abs_path = if let Some(worktree_abs_path) = self
2270 .worktree_for_id(symbol.worktree_id, cx)
2271 .and_then(|worktree| worktree.read(cx).as_local())
2272 .map(|local_worktree| local_worktree.abs_path())
2273 {
2274 worktree_abs_path
2275 } else {
2276 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2277 };
2278 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2279 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2280 uri
2281 } else {
2282 return Task::ready(Err(anyhow!("invalid symbol path")));
2283 };
2284
2285 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2286 } else if let Some(project_id) = self.remote_id() {
2287 let request = self.client.request(proto::OpenBufferForSymbol {
2288 project_id,
2289 symbol: Some(serialize_symbol(symbol)),
2290 });
2291 cx.spawn(|this, mut cx| async move {
2292 let response = request.await?;
2293 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2294 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2295 .await
2296 })
2297 } else {
2298 Task::ready(Err(anyhow!("project does not have a remote id")))
2299 }
2300 }
2301
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Local buffers are queried through their language server; remote
    /// buffers forward the request to the host over RPC. Buffers with no
    /// file, no language server, or no remote connection yield an empty list.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers not backed by a file can't be completed.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor after the position so it remains valid while the request is
        // in flight (used by the remote code path).
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // A local worktree implies a local file, so the path is present.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let (_, lang_server) =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Servers may answer with either a bare array or a list
                // object; normalize both into a Vec of items.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // Determine the range the completion replaces and
                            // the replacement text.
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                // No edit supplied: replace the common prefix
                                // between the buffer text and the label.
                                None => (
                                    this.common_prefix_at(position, &lsp_completion.label),
                                    lsp_completion.label.clone(),
                                ),
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose range doesn't lie
                            // inside the buffer (e.g. from a stale server).
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                log::info!("completion out of expected range");
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote buffer: ask the host, then wait until this replica has
            // caught up to the host's buffer version before resolving.
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2431
    /// Applies a completion's `additional_text_edits` (e.g. auto-imports)
    /// after the completion itself has been inserted.
    ///
    /// Returns the transaction covering the extra edits, or `None` when the
    /// resolved completion has none. When `push_to_history` is false, the
    /// transaction is applied but removed from the undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Ask the server to resolve the completion; many servers fill
                // in the additional edits lazily at this point.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all the edits into one transaction, separate
                        // from whatever transaction preceded them.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote buffer: the host applies the edits and sends back the
            // transaction, which we wait to observe locally before returning.
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2513
2514 pub fn code_actions<T: ToOffset>(
2515 &self,
2516 buffer_handle: &ModelHandle<Buffer>,
2517 range: Range<T>,
2518 cx: &mut ModelContext<Self>,
2519 ) -> Task<Result<Vec<CodeAction>>> {
2520 let buffer_handle = buffer_handle.clone();
2521 let buffer = buffer_handle.read(cx);
2522 let buffer_id = buffer.remote_id();
2523 let worktree;
2524 let buffer_abs_path;
2525 if let Some(file) = File::from_dyn(buffer.file()) {
2526 worktree = file.worktree.clone();
2527 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2528 } else {
2529 return Task::ready(Ok(Default::default()));
2530 };
2531 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2532
2533 if worktree.read(cx).as_local().is_some() {
2534 let buffer_abs_path = buffer_abs_path.unwrap();
2535 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2536 {
2537 server.clone()
2538 } else {
2539 return Task::ready(Ok(Default::default()));
2540 };
2541
2542 let lsp_range = lsp::Range::new(
2543 range.start.to_point_utf16(buffer).to_lsp_position(),
2544 range.end.to_point_utf16(buffer).to_lsp_position(),
2545 );
2546 cx.foreground().spawn(async move {
2547 if !lang_server.capabilities().code_action_provider.is_some() {
2548 return Ok(Default::default());
2549 }
2550
2551 Ok(lang_server
2552 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2553 text_document: lsp::TextDocumentIdentifier::new(
2554 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2555 ),
2556 range: lsp_range,
2557 work_done_progress_params: Default::default(),
2558 partial_result_params: Default::default(),
2559 context: lsp::CodeActionContext {
2560 diagnostics: Default::default(),
2561 only: Some(vec![
2562 lsp::CodeActionKind::QUICKFIX,
2563 lsp::CodeActionKind::REFACTOR,
2564 lsp::CodeActionKind::REFACTOR_EXTRACT,
2565 ]),
2566 },
2567 })
2568 .await?
2569 .unwrap_or_default()
2570 .into_iter()
2571 .filter_map(|entry| {
2572 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2573 Some(CodeAction {
2574 range: range.clone(),
2575 lsp_action,
2576 })
2577 } else {
2578 None
2579 }
2580 })
2581 .collect())
2582 })
2583 } else if let Some(project_id) = self.remote_id() {
2584 let rpc = self.client.clone();
2585 let version = buffer.version();
2586 cx.spawn_weak(|_, mut cx| async move {
2587 let response = rpc
2588 .request(proto::GetCodeActions {
2589 project_id,
2590 buffer_id,
2591 start: Some(language::proto::serialize_anchor(&range.start)),
2592 end: Some(language::proto::serialize_anchor(&range.end)),
2593 version: serialize_version(&version),
2594 })
2595 .await?;
2596
2597 buffer_handle
2598 .update(&mut cx, |buffer, _| {
2599 buffer.wait_for_version(deserialize_version(response.version))
2600 })
2601 .await;
2602
2603 response
2604 .actions
2605 .into_iter()
2606 .map(language::proto::deserialize_code_action)
2607 .collect()
2608 })
2609 } else {
2610 Task::ready(Ok(Default::default()))
2611 }
2612 }
2613
    /// Applies `action` to the project, returning the per-buffer transactions
    /// produced by its workspace edit.
    ///
    /// Locally, the action is first resolved (or re-fetched when the server
    /// provided no resolve data) and its workspace edit is applied; remotely,
    /// the host applies it and streams back the project transaction.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The server supplied resolve data: patch the stored
                    // range with the buffer's current coordinates, then ask
                    // the server to resolve the action into a full edit.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for this range
                    // and re-find the matching one by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    // Actions without an edit (e.g. command-only) produce an
                    // empty transaction.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the action and returns the
            // resulting transaction for local replication.
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2696
    /// Applies an LSP workspace edit to the project: file create/rename/
    /// delete operations go through the project's `fs`, and text edits are
    /// applied to (possibly newly-opened) buffers.
    ///
    /// Returns a `ProjectTransaction` mapping each edited buffer to the
    /// transaction applied to it. When `push_to_history` is false the
    /// transactions are applied but kept out of the undo history.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        // `document_changes` supersedes the legacy `changes` field; normalize
        // both forms into a single list of document-change operations.
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, not string suffixes, so this check is
                    // unlikely to ever be true for a `to_file_path()` result —
                    // verify whether the intent was to inspect the URI string
                    // for a trailing slash.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with` concern
                    // as in the create branch above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or re-use) the target buffer through the same
                    // language server so positions are interpreted
                    // consistently.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                lsp_adapter.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            // Annotated edits carry the same text edit inside.
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    // Apply all edits for this document as one transaction,
                    // optionally hidden from the undo history.
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
2827
2828 pub fn prepare_rename<T: ToPointUtf16>(
2829 &self,
2830 buffer: ModelHandle<Buffer>,
2831 position: T,
2832 cx: &mut ModelContext<Self>,
2833 ) -> Task<Result<Option<Range<Anchor>>>> {
2834 let position = position.to_point_utf16(buffer.read(cx));
2835 self.request_lsp(buffer, PrepareRename { position }, cx)
2836 }
2837
2838 pub fn perform_rename<T: ToPointUtf16>(
2839 &self,
2840 buffer: ModelHandle<Buffer>,
2841 position: T,
2842 new_name: String,
2843 push_to_history: bool,
2844 cx: &mut ModelContext<Self>,
2845 ) -> Task<Result<ProjectTransaction>> {
2846 let position = position.to_point_utf16(buffer.read(cx));
2847 self.request_lsp(
2848 buffer,
2849 PerformRename {
2850 position,
2851 new_name,
2852 push_to_history,
2853 },
2854 cx,
2855 )
2856 }
2857
    /// Searches the project for `query`, returning matching anchor ranges
    /// grouped by buffer.
    ///
    /// Locally this is a three-stage pipeline: background workers scan the
    /// visible files of all local worktrees for candidate paths, candidates
    /// are opened as buffers, and a second worker pool searches each buffer
    /// snapshot. Buffers that are already open are always searched (they may
    /// contain unsaved edits). Remotely, the query is forwarded to the host.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan files on disk for candidate paths. The visible
            // files of all snapshots are treated as one concatenated list,
            // split into contiguous per-worker index ranges.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        // Walk the snapshots, mapping this
                                        // worker's global index range onto
                                        // each snapshot's own file range.
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver gone: the
                                                    // search was abandoned.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf for
                                                    // all entries.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed buffers to the searchers — first the buffers
            // that are already open, then buffers opened for each candidate
            // path emitted by stage 1.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Skip buffers already searched via the
                            // open-buffer pass above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search the buffer snapshots in parallel; each worker
            // accumulates into its own map to avoid locking, and the maps are
            // merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host performs the search and returns
            // buffer + anchor locations, which are deserialized locally.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3060
    /// Routes an `LspCommand` for the given buffer: locally it is sent to the
    /// buffer's language server (after checking the server's capabilities);
    /// in a remote project it is proxied to the host over RPC. Falls through
    /// to a default response when neither path applies.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, (_, language_server))) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Servers that don't support this request yield a default
                    // response rather than an error.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3103
3104 pub fn find_or_create_local_worktree(
3105 &mut self,
3106 abs_path: impl AsRef<Path>,
3107 visible: bool,
3108 cx: &mut ModelContext<Self>,
3109 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3110 let abs_path = abs_path.as_ref();
3111 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3112 Task::ready(Ok((tree.clone(), relative_path.into())))
3113 } else {
3114 let worktree = self.create_local_worktree(abs_path, visible, cx);
3115 cx.foreground()
3116 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3117 }
3118 }
3119
3120 pub fn find_local_worktree(
3121 &self,
3122 abs_path: &Path,
3123 cx: &AppContext,
3124 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3125 for tree in self.worktrees(cx) {
3126 if let Some(relative_path) = tree
3127 .read(cx)
3128 .as_local()
3129 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3130 {
3131 return Some((tree.clone(), relative_path.into()));
3132 }
3133 }
3134 None
3135 }
3136
3137 pub fn is_shared(&self) -> bool {
3138 match &self.client_state {
3139 ProjectClientState::Local { is_shared, .. } => *is_shared,
3140 ProjectClientState::Remote { .. } => false,
3141 }
3142 }
3143
3144 fn create_local_worktree(
3145 &mut self,
3146 abs_path: impl AsRef<Path>,
3147 visible: bool,
3148 cx: &mut ModelContext<Self>,
3149 ) -> Task<Result<ModelHandle<Worktree>>> {
3150 let fs = self.fs.clone();
3151 let client = self.client.clone();
3152 let next_entry_id = self.next_entry_id.clone();
3153 let path: Arc<Path> = abs_path.as_ref().into();
3154 let task = self
3155 .loading_local_worktrees
3156 .entry(path.clone())
3157 .or_insert_with(|| {
3158 cx.spawn(|project, mut cx| {
3159 async move {
3160 let worktree = Worktree::local(
3161 client.clone(),
3162 path.clone(),
3163 visible,
3164 fs,
3165 next_entry_id,
3166 &mut cx,
3167 )
3168 .await;
3169 project.update(&mut cx, |project, _| {
3170 project.loading_local_worktrees.remove(&path);
3171 });
3172 let worktree = worktree?;
3173
3174 let (remote_project_id, is_shared) =
3175 project.update(&mut cx, |project, cx| {
3176 project.add_worktree(&worktree, cx);
3177 (project.remote_id(), project.is_shared())
3178 });
3179
3180 if let Some(project_id) = remote_project_id {
3181 if is_shared {
3182 worktree
3183 .update(&mut cx, |worktree, cx| {
3184 worktree.as_local_mut().unwrap().share(project_id, cx)
3185 })
3186 .await?;
3187 } else {
3188 worktree
3189 .update(&mut cx, |worktree, cx| {
3190 worktree.as_local_mut().unwrap().register(project_id, cx)
3191 })
3192 .await?;
3193 }
3194 }
3195
3196 Ok(worktree)
3197 }
3198 .map_err(|err| Arc::new(err))
3199 })
3200 .shared()
3201 })
3202 .clone();
3203 cx.foreground().spawn(async move {
3204 match task.await {
3205 Ok(worktree) => Ok(worktree),
3206 Err(err) => Err(anyhow!("{}", err)),
3207 }
3208 })
3209 }
3210
3211 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3212 self.worktrees.retain(|worktree| {
3213 worktree
3214 .upgrade(cx)
3215 .map_or(false, |w| w.read(cx).id() != id)
3216 });
3217 cx.notify();
3218 }
3219
3220 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3221 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3222 if worktree.read(cx).is_local() {
3223 cx.subscribe(&worktree, |this, worktree, _, cx| {
3224 this.update_local_worktree_buffers(worktree, cx);
3225 })
3226 .detach();
3227 }
3228
3229 let push_strong_handle = {
3230 let worktree = worktree.read(cx);
3231 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3232 };
3233 if push_strong_handle {
3234 self.worktrees
3235 .push(WorktreeHandle::Strong(worktree.clone()));
3236 } else {
3237 cx.observe_release(&worktree, |this, _, cx| {
3238 this.worktrees
3239 .retain(|worktree| worktree.upgrade(cx).is_some());
3240 cx.notify();
3241 })
3242 .detach();
3243 self.worktrees
3244 .push(WorktreeHandle::Weak(worktree.downgrade()));
3245 }
3246 cx.notify();
3247 }
3248
    /// Reconciles every open buffer with a fresh snapshot of `worktree_handle`
    /// after its filesystem state changed: re-resolves each buffer's `File`
    /// (by entry id first, then by path), notifies remote peers of the new
    /// file metadata, and prunes buffers whose handles have been dropped.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only buffers belonging to the changed worktree are affected.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer resolving by entry id (stable across renames);
                        // fall back to a path lookup; if neither matches, treat
                        // the file as deleted by clearing its entry id while
                        // keeping the last-known path and mtime.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // When sharing, mirror the file change to remote peers;
                        // failures are logged rather than propagated.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer handle is dead; schedule it for removal (can't
                // mutate `opened_buffers` while iterating it).
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }
    }
3316
3317 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3318 let new_active_entry = entry.and_then(|project_path| {
3319 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3320 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3321 Some(entry.id)
3322 });
3323 if new_active_entry != self.active_entry {
3324 self.active_entry = new_active_entry;
3325 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3326 }
3327 }
3328
    /// True while at least one language server is still producing disk-based
    /// diagnostics (tracked via `disk_based_diagnostics_started`/`_finished`).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
3332
3333 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3334 let mut summary = DiagnosticSummary::default();
3335 for (_, path_summary) in self.diagnostic_summaries(cx) {
3336 summary.error_count += path_summary.error_count;
3337 summary.warning_count += path_summary.warning_count;
3338 summary.info_count += path_summary.info_count;
3339 summary.hint_count += path_summary.hint_count;
3340 }
3341 summary
3342 }
3343
3344 pub fn diagnostic_summaries<'a>(
3345 &'a self,
3346 cx: &'a AppContext,
3347 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3348 self.worktrees(cx).flat_map(move |worktree| {
3349 let worktree = worktree.read(cx);
3350 let worktree_id = worktree.id();
3351 worktree
3352 .diagnostic_summaries()
3353 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3354 })
3355 }
3356
3357 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3358 self.language_servers_with_diagnostics_running += 1;
3359 if self.language_servers_with_diagnostics_running == 1 {
3360 cx.emit(Event::DiskBasedDiagnosticsStarted);
3361 }
3362 }
3363
3364 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3365 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3366 self.language_servers_with_diagnostics_running -= 1;
3367 if self.language_servers_with_diagnostics_running == 0 {
3368 cx.emit(Event::DiskBasedDiagnosticsFinished);
3369 }
3370 }
3371
    /// Returns the id of the most recently activated project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3375
3376 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3377 self.worktree_for_id(path.worktree_id, cx)?
3378 .read(cx)
3379 .entry_for_path(&path.path)
3380 .map(|entry| entry.id)
3381 }
3382
3383 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3384 let worktree = self.worktree_for_entry(entry_id, cx)?;
3385 let worktree = worktree.read(cx);
3386 let worktree_id = worktree.id();
3387 let path = worktree.entry_for_id(entry_id)?.path.clone();
3388 Some(ProjectPath { worktree_id, path })
3389 }
3390
3391 // RPC message handlers
3392
3393 async fn handle_unshare_project(
3394 this: ModelHandle<Self>,
3395 _: TypedEnvelope<proto::UnshareProject>,
3396 _: Arc<Client>,
3397 mut cx: AsyncAppContext,
3398 ) -> Result<()> {
3399 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3400 Ok(())
3401 }
3402
3403 async fn handle_add_collaborator(
3404 this: ModelHandle<Self>,
3405 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3406 _: Arc<Client>,
3407 mut cx: AsyncAppContext,
3408 ) -> Result<()> {
3409 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3410 let collaborator = envelope
3411 .payload
3412 .collaborator
3413 .take()
3414 .ok_or_else(|| anyhow!("empty collaborator"))?;
3415
3416 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3417 this.update(&mut cx, |this, cx| {
3418 this.collaborators
3419 .insert(collaborator.peer_id, collaborator);
3420 cx.notify();
3421 });
3422
3423 Ok(())
3424 }
3425
3426 async fn handle_remove_collaborator(
3427 this: ModelHandle<Self>,
3428 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3429 _: Arc<Client>,
3430 mut cx: AsyncAppContext,
3431 ) -> Result<()> {
3432 this.update(&mut cx, |this, cx| {
3433 let peer_id = PeerId(envelope.payload.peer_id);
3434 let replica_id = this
3435 .collaborators
3436 .remove(&peer_id)
3437 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3438 .replica_id;
3439 for (_, buffer) in &this.opened_buffers {
3440 if let Some(buffer) = buffer.upgrade(cx) {
3441 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3442 }
3443 }
3444 cx.emit(Event::CollaboratorLeft(peer_id));
3445 cx.notify();
3446 Ok(())
3447 })
3448 }
3449
3450 async fn handle_register_worktree(
3451 this: ModelHandle<Self>,
3452 envelope: TypedEnvelope<proto::RegisterWorktree>,
3453 client: Arc<Client>,
3454 mut cx: AsyncAppContext,
3455 ) -> Result<()> {
3456 this.update(&mut cx, |this, cx| {
3457 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3458 let replica_id = this.replica_id();
3459 let worktree = proto::Worktree {
3460 id: envelope.payload.worktree_id,
3461 root_name: envelope.payload.root_name,
3462 entries: Default::default(),
3463 diagnostic_summaries: Default::default(),
3464 visible: envelope.payload.visible,
3465 };
3466 let (worktree, load_task) =
3467 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3468 this.add_worktree(&worktree, cx);
3469 load_task.detach();
3470 Ok(())
3471 })
3472 }
3473
3474 async fn handle_unregister_worktree(
3475 this: ModelHandle<Self>,
3476 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3477 _: Arc<Client>,
3478 mut cx: AsyncAppContext,
3479 ) -> Result<()> {
3480 this.update(&mut cx, |this, cx| {
3481 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3482 this.remove_worktree(worktree_id, cx);
3483 Ok(())
3484 })
3485 }
3486
3487 async fn handle_update_worktree(
3488 this: ModelHandle<Self>,
3489 envelope: TypedEnvelope<proto::UpdateWorktree>,
3490 _: Arc<Client>,
3491 mut cx: AsyncAppContext,
3492 ) -> Result<()> {
3493 this.update(&mut cx, |this, cx| {
3494 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3495 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3496 worktree.update(cx, |worktree, _| {
3497 let worktree = worktree.as_remote_mut().unwrap();
3498 worktree.update_from_remote(envelope)
3499 })?;
3500 }
3501 Ok(())
3502 })
3503 }
3504
3505 async fn handle_update_diagnostic_summary(
3506 this: ModelHandle<Self>,
3507 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3508 _: Arc<Client>,
3509 mut cx: AsyncAppContext,
3510 ) -> Result<()> {
3511 this.update(&mut cx, |this, cx| {
3512 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3513 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3514 if let Some(summary) = envelope.payload.summary {
3515 let project_path = ProjectPath {
3516 worktree_id,
3517 path: Path::new(&summary.path).into(),
3518 };
3519 worktree.update(cx, |worktree, _| {
3520 worktree
3521 .as_remote_mut()
3522 .unwrap()
3523 .update_diagnostic_summary(project_path.path.clone(), &summary);
3524 });
3525 cx.emit(Event::DiagnosticsUpdated(project_path));
3526 }
3527 }
3528 Ok(())
3529 })
3530 }
3531
3532 async fn handle_start_language_server(
3533 this: ModelHandle<Self>,
3534 envelope: TypedEnvelope<proto::StartLanguageServer>,
3535 _: Arc<Client>,
3536 mut cx: AsyncAppContext,
3537 ) -> Result<()> {
3538 let server = envelope
3539 .payload
3540 .server
3541 .ok_or_else(|| anyhow!("invalid server"))?;
3542 this.update(&mut cx, |this, cx| {
3543 this.language_server_statuses.insert(
3544 server.id as usize,
3545 LanguageServerStatus {
3546 name: server.name,
3547 pending_work: Default::default(),
3548 pending_diagnostic_updates: 0,
3549 },
3550 );
3551 cx.notify();
3552 });
3553 Ok(())
3554 }
3555
    /// Routes a language-server status message from the host into local
    /// progress tracking: work start/progress/end notifications and
    /// disk-based diagnostic batch boundaries.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            // A long-running server operation (identified by `token`) began.
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamped locally at the moment of receipt.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            // Disk-based diagnostics (e.g. from an external checker) started
            // or finished a full pass on the host.
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3604
3605 async fn handle_update_buffer(
3606 this: ModelHandle<Self>,
3607 envelope: TypedEnvelope<proto::UpdateBuffer>,
3608 _: Arc<Client>,
3609 mut cx: AsyncAppContext,
3610 ) -> Result<()> {
3611 this.update(&mut cx, |this, cx| {
3612 let payload = envelope.payload.clone();
3613 let buffer_id = payload.buffer_id;
3614 let ops = payload
3615 .operations
3616 .into_iter()
3617 .map(|op| language::proto::deserialize_operation(op))
3618 .collect::<Result<Vec<_>, _>>()?;
3619 match this.opened_buffers.entry(buffer_id) {
3620 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3621 OpenBuffer::Strong(buffer) => {
3622 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3623 }
3624 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3625 OpenBuffer::Weak(_) => {}
3626 },
3627 hash_map::Entry::Vacant(e) => {
3628 e.insert(OpenBuffer::Loading(ops));
3629 }
3630 }
3631 Ok(())
3632 })
3633 }
3634
3635 async fn handle_update_buffer_file(
3636 this: ModelHandle<Self>,
3637 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3638 _: Arc<Client>,
3639 mut cx: AsyncAppContext,
3640 ) -> Result<()> {
3641 this.update(&mut cx, |this, cx| {
3642 let payload = envelope.payload.clone();
3643 let buffer_id = payload.buffer_id;
3644 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3645 let worktree = this
3646 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3647 .ok_or_else(|| anyhow!("no such worktree"))?;
3648 let file = File::from_proto(file, worktree.clone(), cx)?;
3649 let buffer = this
3650 .opened_buffers
3651 .get_mut(&buffer_id)
3652 .and_then(|b| b.upgrade(cx))
3653 .ok_or_else(|| anyhow!("no such buffer"))?;
3654 buffer.update(cx, |buffer, cx| {
3655 buffer.file_updated(Box::new(file), cx).detach();
3656 });
3657 Ok(())
3658 })
3659 }
3660
    /// Handles a guest's request to save a buffer on the host, replying with
    /// the version and mtime that were actually persisted.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                // NOTE(review): assumes every tracked buffer handle can still
                // be upgraded; a dropped weak handle would panic here — verify
                // against how `opened_buffers` is maintained.
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until the buffer has applied at least the edits the
        // requester had seen, so the saved contents reflect their state.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
3693
3694 async fn handle_format_buffers(
3695 this: ModelHandle<Self>,
3696 envelope: TypedEnvelope<proto::FormatBuffers>,
3697 _: Arc<Client>,
3698 mut cx: AsyncAppContext,
3699 ) -> Result<proto::FormatBuffersResponse> {
3700 let sender_id = envelope.original_sender_id()?;
3701 let format = this.update(&mut cx, |this, cx| {
3702 let mut buffers = HashSet::default();
3703 for buffer_id in &envelope.payload.buffer_ids {
3704 buffers.insert(
3705 this.opened_buffers
3706 .get(buffer_id)
3707 .map(|buffer| buffer.upgrade(cx).unwrap())
3708 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3709 );
3710 }
3711 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3712 })?;
3713
3714 let project_transaction = format.await?;
3715 let project_transaction = this.update(&mut cx, |this, cx| {
3716 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3717 });
3718 Ok(proto::FormatBuffersResponse {
3719 transaction: Some(project_transaction),
3720 })
3721 }
3722
    /// Handles a guest's completion request at an anchored position in a
    /// buffer, replying with completions plus the buffer version they were
    /// computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                // NOTE(review): assumes the tracked handle is still alive; a
                // dropped weak handle would panic — confirm invariant.
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait until the buffer has caught up to the requester's edits before
        // resolving the anchor.
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting: the response reports the version
        // the completions were actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
3757
3758 async fn handle_apply_additional_edits_for_completion(
3759 this: ModelHandle<Self>,
3760 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3761 _: Arc<Client>,
3762 mut cx: AsyncAppContext,
3763 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3764 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3765 let buffer = this
3766 .opened_buffers
3767 .get(&envelope.payload.buffer_id)
3768 .map(|buffer| buffer.upgrade(cx).unwrap())
3769 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3770 let language = buffer.read(cx).language();
3771 let completion = language::proto::deserialize_completion(
3772 envelope
3773 .payload
3774 .completion
3775 .ok_or_else(|| anyhow!("invalid completion"))?,
3776 language,
3777 )?;
3778 Ok::<_, anyhow::Error>(
3779 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3780 )
3781 })?;
3782
3783 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3784 transaction: apply_additional_edits
3785 .await?
3786 .as_ref()
3787 .map(language::proto::serialize_transaction),
3788 })
3789 }
3790
    /// Handles a guest's request for code actions over an anchored range,
    /// replying with the actions plus the buffer version they were computed
    /// against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                // NOTE(review): assumes the tracked handle is still alive; a
                // dropped weak handle would panic — confirm invariant.
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Wait for the buffer to reach the requester's version so the anchors
        // resolve against the same content they saw.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Capture the version after waiting: the response reports what the
        // actions were actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
3833
3834 async fn handle_apply_code_action(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::ApplyCodeActionResponse> {
3840 let sender_id = envelope.original_sender_id()?;
3841 let action = language::proto::deserialize_code_action(
3842 envelope
3843 .payload
3844 .action
3845 .ok_or_else(|| anyhow!("invalid action"))?,
3846 )?;
3847 let apply_code_action = this.update(&mut cx, |this, cx| {
3848 let buffer = this
3849 .opened_buffers
3850 .get(&envelope.payload.buffer_id)
3851 .map(|buffer| buffer.upgrade(cx).unwrap())
3852 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3853 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3854 })?;
3855
3856 let project_transaction = apply_code_action.await?;
3857 let project_transaction = this.update(&mut cx, |this, cx| {
3858 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3859 });
3860 Ok(proto::ApplyCodeActionResponse {
3861 transaction: Some(project_transaction),
3862 })
3863 }
3864
    /// Generic handler for LSP-backed requests forwarded by peers: resolves
    /// the target buffer, reconstructs the typed request, forwards it to the
    /// language server, and serializes the response for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` may await (e.g. for the buffer to reach the requested
        // version) before the request is ready to dispatch.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version the request will run against, so the response
        // can tell the sender which buffer state it corresponds to.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3905
3906 async fn handle_get_project_symbols(
3907 this: ModelHandle<Self>,
3908 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3909 _: Arc<Client>,
3910 mut cx: AsyncAppContext,
3911 ) -> Result<proto::GetProjectSymbolsResponse> {
3912 let symbols = this
3913 .update(&mut cx, |this, cx| {
3914 this.symbols(&envelope.payload.query, cx)
3915 })
3916 .await?;
3917
3918 Ok(proto::GetProjectSymbolsResponse {
3919 symbols: symbols.iter().map(serialize_symbol).collect(),
3920 })
3921 }
3922
3923 async fn handle_search_project(
3924 this: ModelHandle<Self>,
3925 envelope: TypedEnvelope<proto::SearchProject>,
3926 _: Arc<Client>,
3927 mut cx: AsyncAppContext,
3928 ) -> Result<proto::SearchProjectResponse> {
3929 let peer_id = envelope.original_sender_id()?;
3930 let query = SearchQuery::from_proto(envelope.payload)?;
3931 let result = this
3932 .update(&mut cx, |this, cx| this.search(query, cx))
3933 .await?;
3934
3935 this.update(&mut cx, |this, cx| {
3936 let mut locations = Vec::new();
3937 for (buffer, ranges) in result {
3938 for range in ranges {
3939 let start = serialize_anchor(&range.start);
3940 let end = serialize_anchor(&range.end);
3941 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3942 locations.push(proto::Location {
3943 buffer: Some(buffer),
3944 start: Some(start),
3945 end: Some(end),
3946 });
3947 }
3948 }
3949 Ok(proto::SearchProjectResponse { locations })
3950 })
3951 }
3952
3953 async fn handle_open_buffer_for_symbol(
3954 this: ModelHandle<Self>,
3955 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3956 _: Arc<Client>,
3957 mut cx: AsyncAppContext,
3958 ) -> Result<proto::OpenBufferForSymbolResponse> {
3959 let peer_id = envelope.original_sender_id()?;
3960 let symbol = envelope
3961 .payload
3962 .symbol
3963 .ok_or_else(|| anyhow!("invalid symbol"))?;
3964 let symbol = this.read_with(&cx, |this, _| {
3965 let symbol = this.deserialize_symbol(symbol)?;
3966 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3967 if signature == symbol.signature {
3968 Ok(symbol)
3969 } else {
3970 Err(anyhow!("invalid symbol signature"))
3971 }
3972 })?;
3973 let buffer = this
3974 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3975 .await?;
3976
3977 Ok(proto::OpenBufferForSymbolResponse {
3978 buffer: Some(this.update(&mut cx, |this, cx| {
3979 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3980 })),
3981 })
3982 }
3983
3984 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3985 let mut hasher = Sha256::new();
3986 hasher.update(worktree_id.to_proto().to_be_bytes());
3987 hasher.update(path.to_string_lossy().as_bytes());
3988 hasher.update(self.nonce.to_be_bytes());
3989 hasher.finalize().as_slice().try_into().unwrap()
3990 }
3991
3992 async fn handle_open_buffer_by_id(
3993 this: ModelHandle<Self>,
3994 envelope: TypedEnvelope<proto::OpenBufferById>,
3995 _: Arc<Client>,
3996 mut cx: AsyncAppContext,
3997 ) -> Result<proto::OpenBufferResponse> {
3998 let peer_id = envelope.original_sender_id()?;
3999 let buffer = this
4000 .update(&mut cx, |this, cx| {
4001 this.open_buffer_by_id(envelope.payload.id, cx)
4002 })
4003 .await?;
4004 this.update(&mut cx, |this, cx| {
4005 Ok(proto::OpenBufferResponse {
4006 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4007 })
4008 })
4009 }
4010
4011 async fn handle_open_buffer_by_path(
4012 this: ModelHandle<Self>,
4013 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4014 _: Arc<Client>,
4015 mut cx: AsyncAppContext,
4016 ) -> Result<proto::OpenBufferResponse> {
4017 let peer_id = envelope.original_sender_id()?;
4018 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4019 let open_buffer = this.update(&mut cx, |this, cx| {
4020 this.open_buffer(
4021 ProjectPath {
4022 worktree_id,
4023 path: PathBuf::from(envelope.payload.path).into(),
4024 },
4025 cx,
4026 )
4027 });
4028
4029 let buffer = open_buffer.await?;
4030 this.update(&mut cx, |this, cx| {
4031 Ok(proto::OpenBufferResponse {
4032 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4033 })
4034 })
4035 }
4036
4037 fn serialize_project_transaction_for_peer(
4038 &mut self,
4039 project_transaction: ProjectTransaction,
4040 peer_id: PeerId,
4041 cx: &AppContext,
4042 ) -> proto::ProjectTransaction {
4043 let mut serialized_transaction = proto::ProjectTransaction {
4044 buffers: Default::default(),
4045 transactions: Default::default(),
4046 };
4047 for (buffer, transaction) in project_transaction.0 {
4048 serialized_transaction
4049 .buffers
4050 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4051 serialized_transaction
4052 .transactions
4053 .push(language::proto::serialize_transaction(&transaction));
4054 }
4055 serialized_transaction
4056 }
4057
    /// Reconstructs a `ProjectTransaction` received from a peer: resolves or
    /// opens each referenced buffer, waits for the transaction's edits to
    /// arrive, and optionally records the transactions in undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel lists paired by index.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits a transaction refers to may still be in flight;
                // wait until the buffer has applied all of them.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4091
4092 fn serialize_buffer_for_peer(
4093 &mut self,
4094 buffer: &ModelHandle<Buffer>,
4095 peer_id: PeerId,
4096 cx: &AppContext,
4097 ) -> proto::Buffer {
4098 let buffer_id = buffer.read(cx).remote_id();
4099 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4100 if shared_buffers.insert(buffer_id) {
4101 proto::Buffer {
4102 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4103 }
4104 } else {
4105 proto::Buffer {
4106 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4107 }
4108 }
4109 }
4110
    /// Resolves a serialized buffer reference into a local buffer model.
    /// An `Id` variant waits (possibly across multiple notifications) until a
    /// buffer with that id appears; a `State` variant constructs a fresh
    /// replica from the full serialized state and registers it.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Channel used to signal (and be signaled about) newly-opened buffers.
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer's state message may not have arrived yet;
                    // poll the open-buffer set each time a buffer opens until
                    // the id shows up.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        // A closed channel means the project itself was dropped.
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the file's worktree before building the buffer,
                    // so the buffer is created with its file metadata attached.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting on the `Id` path above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4169
    /// Reconstructs a `Symbol` from its wire representation, re-deriving its
    /// display label from the language registered for the symbol's path.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes a raw integer received from the wire
        // into the symbol-kind type without validating it. A malicious or
        // out-of-date peer could send an out-of-range discriminant, which is
        // undefined behavior — consider a checked conversion instead.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain, unstyled label when the language can't
            // produce one for this symbol kind.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4199
4200 async fn handle_buffer_saved(
4201 this: ModelHandle<Self>,
4202 envelope: TypedEnvelope<proto::BufferSaved>,
4203 _: Arc<Client>,
4204 mut cx: AsyncAppContext,
4205 ) -> Result<()> {
4206 let version = deserialize_version(envelope.payload.version);
4207 let mtime = envelope
4208 .payload
4209 .mtime
4210 .ok_or_else(|| anyhow!("missing mtime"))?
4211 .into();
4212
4213 this.update(&mut cx, |this, cx| {
4214 let buffer = this
4215 .opened_buffers
4216 .get(&envelope.payload.buffer_id)
4217 .and_then(|buffer| buffer.upgrade(cx));
4218 if let Some(buffer) = buffer {
4219 buffer.update(cx, |buffer, cx| {
4220 buffer.did_save(version, mtime, None, cx);
4221 });
4222 }
4223 Ok(())
4224 })
4225 }
4226
4227 async fn handle_buffer_reloaded(
4228 this: ModelHandle<Self>,
4229 envelope: TypedEnvelope<proto::BufferReloaded>,
4230 _: Arc<Client>,
4231 mut cx: AsyncAppContext,
4232 ) -> Result<()> {
4233 let payload = envelope.payload.clone();
4234 let version = deserialize_version(payload.version);
4235 let mtime = payload
4236 .mtime
4237 .ok_or_else(|| anyhow!("missing mtime"))?
4238 .into();
4239 this.update(&mut cx, |this, cx| {
4240 let buffer = this
4241 .opened_buffers
4242 .get(&payload.buffer_id)
4243 .and_then(|buffer| buffer.upgrade(cx));
4244 if let Some(buffer) = buffer {
4245 buffer.update(cx, |buffer, cx| {
4246 buffer.did_reload(version, mtime, cx);
4247 });
4248 }
4249 Ok(())
4250 })
4251 }
4252
4253 pub fn match_paths<'a>(
4254 &self,
4255 query: &'a str,
4256 include_ignored: bool,
4257 smart_case: bool,
4258 max_results: usize,
4259 cancel_flag: &'a AtomicBool,
4260 cx: &AppContext,
4261 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4262 let worktrees = self
4263 .worktrees(cx)
4264 .filter(|worktree| worktree.read(cx).is_visible())
4265 .collect::<Vec<_>>();
4266 let include_root_name = worktrees.len() > 1;
4267 let candidate_sets = worktrees
4268 .into_iter()
4269 .map(|worktree| CandidateSet {
4270 snapshot: worktree.read(cx).snapshot(),
4271 include_ignored,
4272 include_root_name,
4273 })
4274 .collect::<Vec<_>>();
4275
4276 let background = cx.background().clone();
4277 async move {
4278 fuzzy::match_paths(
4279 candidate_sets.as_slice(),
4280 query,
4281 smart_case,
4282 max_results,
4283 cancel_flag,
4284 background,
4285 )
4286 .await
4287 }
4288 }
4289
    /// Converts a batch of LSP `TextEdit`s into anchored edits on `buffer`.
    ///
    /// `version` identifies which historical buffer snapshot the edits were
    /// computed against (see `buffer_snapshot_for_lsp_version`); `None` means
    /// the current text. Multi-line replacements are diffed against the old
    /// text so that anchors in unchanged regions keep their positions.
    /// Returns a background task yielding `(range, new_text)` pairs, or an
    /// error if the edits don't fit the snapshot.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Only merge across a gap when the gap is exactly one
                        // newline: the next edit starts at column 0 of the
                        // following row and this edit ends at end-of-line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        // Re-insert the unchanged newline that separated the
                        // two edits.
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose positions don't exist in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // `moved_since_edit` tracks whether the previous diff hunk
                    // ended; consecutive delete/insert hunks are merged into
                    // the last pushed edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion at a single point.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4392
4393 fn buffer_snapshot_for_lsp_version(
4394 &mut self,
4395 buffer: &ModelHandle<Buffer>,
4396 version: Option<i32>,
4397 cx: &AppContext,
4398 ) -> Result<TextBufferSnapshot> {
4399 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4400
4401 if let Some(version) = version {
4402 let buffer_id = buffer.read(cx).remote_id();
4403 let snapshots = self
4404 .buffer_snapshots
4405 .get_mut(&buffer_id)
4406 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4407 let mut found_snapshot = None;
4408 snapshots.retain(|(snapshot_version, snapshot)| {
4409 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4410 false
4411 } else {
4412 if *snapshot_version == version {
4413 found_snapshot = Some(snapshot.clone());
4414 }
4415 true
4416 }
4417 });
4418
4419 found_snapshot.ok_or_else(|| {
4420 anyhow!(
4421 "snapshot not found for buffer {} at version {}",
4422 buffer_id,
4423 version
4424 )
4425 })
4426 } else {
4427 Ok((buffer.read(cx)).text_snapshot())
4428 }
4429 }
4430
4431 fn language_server_for_buffer(
4432 &self,
4433 buffer: &Buffer,
4434 cx: &AppContext,
4435 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4436 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4437 let worktree_id = file.worktree_id(cx);
4438 self.language_servers
4439 .get(&(worktree_id, language.lsp_adapter()?.name()))
4440 } else {
4441 None
4442 }
4443 }
4444}
4445
4446impl WorktreeHandle {
4447 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4448 match self {
4449 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4450 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4451 }
4452 }
4453}
4454
4455impl OpenBuffer {
4456 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4457 match self {
4458 OpenBuffer::Strong(handle) => Some(handle.clone()),
4459 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4460 OpenBuffer::Loading(_) => None,
4461 }
4462 }
4463}
4464
/// A fuzzy-matching candidate set backed by one worktree snapshot.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether gitignored files participate in matching.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
4470
4471impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4472 type Candidates = CandidateSetIter<'a>;
4473
4474 fn id(&self) -> usize {
4475 self.snapshot.id().to_usize()
4476 }
4477
4478 fn len(&self) -> usize {
4479 if self.include_ignored {
4480 self.snapshot.file_count()
4481 } else {
4482 self.snapshot.visible_file_count()
4483 }
4484 }
4485
4486 fn prefix(&self) -> Arc<str> {
4487 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4488 self.snapshot.root_name().into()
4489 } else if self.include_root_name {
4490 format!("{}/", self.snapshot.root_name()).into()
4491 } else {
4492 "".into()
4493 }
4494 }
4495
4496 fn candidates(&'a self, start: usize) -> Self::Candidates {
4497 CandidateSetIter {
4498 traversal: self.snapshot.files(self.include_ignored, start),
4499 }
4500 }
4501}
4502
/// Iterator adapter turning worktree file entries into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4506
4507impl<'a> Iterator for CandidateSetIter<'a> {
4508 type Item = PathMatchCandidate<'a>;
4509
4510 fn next(&mut self) -> Option<Self::Item> {
4511 self.traversal.next().map(|entry| {
4512 if let EntryKind::File(char_bag) = entry.kind {
4513 PathMatchCandidate {
4514 path: &entry.path,
4515 char_bag,
4516 }
4517 } else {
4518 unreachable!()
4519 }
4520 })
4521 }
4522}
4523
4524impl Entity for Project {
4525 type Event = Event;
4526
4527 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4528 match &self.client_state {
4529 ProjectClientState::Local { remote_id_rx, .. } => {
4530 if let Some(project_id) = *remote_id_rx.borrow() {
4531 self.client
4532 .send(proto::UnregisterProject { project_id })
4533 .log_err();
4534 }
4535 }
4536 ProjectClientState::Remote { remote_id, .. } => {
4537 self.client
4538 .send(proto::LeaveProject {
4539 project_id: *remote_id,
4540 })
4541 .log_err();
4542 }
4543 }
4544 }
4545
4546 fn app_will_quit(
4547 &mut self,
4548 _: &mut MutableAppContext,
4549 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4550 let shutdown_futures = self
4551 .language_servers
4552 .drain()
4553 .filter_map(|(_, (_, server))| server.shutdown())
4554 .collect::<Vec<_>>();
4555 Some(
4556 async move {
4557 futures::future::join_all(shutdown_futures).await;
4558 }
4559 .boxed(),
4560 )
4561 }
4562}
4563
4564impl Collaborator {
4565 fn from_proto(
4566 message: proto::Collaborator,
4567 user_store: &ModelHandle<UserStore>,
4568 cx: &mut AsyncAppContext,
4569 ) -> impl Future<Output = Result<Self>> {
4570 let user = user_store.update(cx, |user_store, cx| {
4571 user_store.fetch_user(message.user_id, cx)
4572 });
4573
4574 async move {
4575 Ok(Self {
4576 peer_id: PeerId(message.peer_id),
4577 user: user.await?,
4578 replica_id: message.replica_id as ReplicaId,
4579 })
4580 }
4581 }
4582}
4583
4584impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4585 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4586 Self {
4587 worktree_id,
4588 path: path.as_ref().into(),
4589 }
4590 }
4591}
4592
4593impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4594 fn from(options: lsp::CreateFileOptions) -> Self {
4595 Self {
4596 overwrite: options.overwrite.unwrap_or(false),
4597 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4598 }
4599 }
4600}
4601
4602impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4603 fn from(options: lsp::RenameFileOptions) -> Self {
4604 Self {
4605 overwrite: options.overwrite.unwrap_or(false),
4606 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4607 }
4608 }
4609}
4610
4611impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4612 fn from(options: lsp::DeleteFileOptions) -> Self {
4613 Self {
4614 recursive: options.recursive.unwrap_or(false),
4615 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4616 }
4617 }
4618}
4619
/// Converts a `Symbol` into its protobuf representation for transmission
/// over the wire. Inverse of the symbol deserialization above.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY: assumes `symbol.kind` and the proto `kind` field share the
        // same integer representation — NOTE(review): verify this invariant;
        // an explicit conversion would be safer than `transmute`.
        kind: unsafe { mem::transmute(symbol.kind) },
        // Lossy conversion: non-UTF-8 path components are replaced.
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4639
/// Computes the path of `path` relative to `base`, inserting `..` components
/// to walk up out of `base` where the two paths diverge.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut origin = base.components();
    let mut relative: Vec<Component> = Vec::new();
    loop {
        match (target.next(), origin.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // `base` exhausted: the rest of `path` is the answer.
            (Some(component), None) => {
                relative.push(component);
                relative.extend(target.by_ref());
                break;
            }
            // `path` exhausted: walk up one level per remaining base component.
            (None, _) => relative.push(Component::ParentDir),
            // Matching leading components are skipped entirely.
            (Some(a), Some(b)) if relative.is_empty() && a == b => {}
            // A `.` in `base` doesn't consume a level of `path`.
            (Some(a), Some(b)) if b == Component::CurDir => relative.push(a),
            // Paths diverge: back out of the rest of `base`, then descend
            // into the rest of `path`.
            (Some(a), Some(_)) => {
                relative.push(Component::ParentDir);
                relative.extend(origin.by_ref().map(|_| Component::ParentDir));
                relative.push(a);
                relative.extend(target.by_ref());
                break;
            }
        }
    }
    relative.into_iter().map(|component| component.as_os_str()).collect()
}
4668
4669impl Item for Buffer {
4670 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4671 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4672 }
4673}
4674
4675#[cfg(test)]
4676mod tests {
4677 use super::{Event, *};
4678 use fs::RealFs;
4679 use futures::{future, StreamExt};
4680 use gpui::test::subscribe;
4681 use language::{
4682 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4683 ToPoint,
4684 };
4685 use lsp::Url;
4686 use serde_json::json;
4687 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4688 use unindent::Unindent as _;
4689 use util::{assert_set_eq, test::temp_tree};
4690 use worktree::WorktreeHandle as _;
4691
    // Verifies that worktree scanning follows symlinks (both a symlinked
    // root and a symlinked subdirectory) and that fuzzy path matching finds
    // the expected files in order.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the worktree through a symlink to the root, and add a symlink
        // inside it pointing at an existing subdirectory.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same inodes as its
            // target.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against all paths; only the files under
        // "banana" should match.
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4754
    // End-to-end check of language-server lifecycle management: servers are
    // started lazily per language, notified about opens/edits/saves/closes
    // only for buffers of their language, and restarted servers re-open
    // their documents.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two languages, each with its own fake LSP adapter and distinct
        // completion trigger characters.
        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers receive a shutdown request before the new ones
        // are spawned.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure both rust documents are reopened in new rust language server without worrying about order
        assert_set_eq!(
            [
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                    version: 1,
                    text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
            ]
        );

        // Ensure json document is reopened in new json language server
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5035
    // Verifies that disk-based diagnostic progress notifications from a
    // language server are translated into project events, with nested
    // progress tokens collapsed into one started/finished pair, and that
    // published diagnostics show up in newly-opened buffers.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let progress_token = "the-progress-token";
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token),
            disk_based_diagnostics_sources: &["disk"],
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nested progress for the same token must not emit extra
        // started/finished events.
        fake_server.start_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        fake_server.start_progress(progress_token).await;

        // Publish a diagnostic while disk-based diagnostics are still in
        // progress.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Only once the outermost progress ends do the updated/finished
        // events fire.
        fake_server.end_progress(progress_token).await;
        fake_server.end_progress(progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // Opening the diagnosed file shows the published diagnostic.
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
5153
5154 #[gpui::test]
5155 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5156 cx.foreground().forbid_parking();
5157
5158 let mut language = Language::new(
5159 LanguageConfig {
5160 name: "Rust".into(),
5161 path_suffixes: vec!["rs".to_string()],
5162 ..Default::default()
5163 },
5164 Some(tree_sitter_rust::language()),
5165 );
5166 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5167 disk_based_diagnostics_sources: &["disk"],
5168 ..Default::default()
5169 });
5170
5171 let text = "
5172 fn a() { A }
5173 fn b() { BB }
5174 fn c() { CCC }
5175 "
5176 .unindent();
5177
5178 let fs = FakeFs::new(cx.background());
5179 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5180
5181 let project = Project::test(fs, cx);
5182 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5183
5184 let worktree_id = project
5185 .update(cx, |project, cx| {
5186 project.find_or_create_local_worktree("/dir", true, cx)
5187 })
5188 .await
5189 .unwrap()
5190 .0
5191 .read_with(cx, |tree, _| tree.id());
5192
5193 let buffer = project
5194 .update(cx, |project, cx| {
5195 project.open_buffer((worktree_id, "a.rs"), cx)
5196 })
5197 .await
5198 .unwrap();
5199
5200 let mut fake_server = fake_servers.next().await.unwrap();
5201 let open_notification = fake_server
5202 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5203 .await;
5204
5205 // Edit the buffer, moving the content down
5206 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5207 let change_notification_1 = fake_server
5208 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5209 .await;
5210 assert!(
5211 change_notification_1.text_document.version > open_notification.text_document.version
5212 );
5213
5214 // Report some diagnostics for the initial version of the buffer
5215 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5216 lsp::PublishDiagnosticsParams {
5217 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5218 version: Some(open_notification.text_document.version),
5219 diagnostics: vec![
5220 lsp::Diagnostic {
5221 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5222 severity: Some(DiagnosticSeverity::ERROR),
5223 message: "undefined variable 'A'".to_string(),
5224 source: Some("disk".to_string()),
5225 ..Default::default()
5226 },
5227 lsp::Diagnostic {
5228 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5229 severity: Some(DiagnosticSeverity::ERROR),
5230 message: "undefined variable 'BB'".to_string(),
5231 source: Some("disk".to_string()),
5232 ..Default::default()
5233 },
5234 lsp::Diagnostic {
5235 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5236 severity: Some(DiagnosticSeverity::ERROR),
5237 source: Some("disk".to_string()),
5238 message: "undefined variable 'CCC'".to_string(),
5239 ..Default::default()
5240 },
5241 ],
5242 },
5243 );
5244
5245 // The diagnostics have moved down since they were created.
5246 buffer.next_notification(cx).await;
5247 buffer.read_with(cx, |buffer, _| {
5248 assert_eq!(
5249 buffer
5250 .snapshot()
5251 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5252 .collect::<Vec<_>>(),
5253 &[
5254 DiagnosticEntry {
5255 range: Point::new(3, 9)..Point::new(3, 11),
5256 diagnostic: Diagnostic {
5257 severity: DiagnosticSeverity::ERROR,
5258 message: "undefined variable 'BB'".to_string(),
5259 is_disk_based: true,
5260 group_id: 1,
5261 is_primary: true,
5262 ..Default::default()
5263 },
5264 },
5265 DiagnosticEntry {
5266 range: Point::new(4, 9)..Point::new(4, 12),
5267 diagnostic: Diagnostic {
5268 severity: DiagnosticSeverity::ERROR,
5269 message: "undefined variable 'CCC'".to_string(),
5270 is_disk_based: true,
5271 group_id: 2,
5272 is_primary: true,
5273 ..Default::default()
5274 }
5275 }
5276 ]
5277 );
5278 assert_eq!(
5279 chunks_with_diagnostics(buffer, 0..buffer.len()),
5280 [
5281 ("\n\nfn a() { ".to_string(), None),
5282 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5283 (" }\nfn b() { ".to_string(), None),
5284 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5285 (" }\nfn c() { ".to_string(), None),
5286 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5287 (" }\n".to_string(), None),
5288 ]
5289 );
5290 assert_eq!(
5291 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5292 [
5293 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5294 (" }\nfn c() { ".to_string(), None),
5295 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5296 ]
5297 );
5298 });
5299
5300 // Ensure overlapping diagnostics are highlighted correctly.
5301 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5302 lsp::PublishDiagnosticsParams {
5303 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5304 version: Some(open_notification.text_document.version),
5305 diagnostics: vec![
5306 lsp::Diagnostic {
5307 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5308 severity: Some(DiagnosticSeverity::ERROR),
5309 message: "undefined variable 'A'".to_string(),
5310 source: Some("disk".to_string()),
5311 ..Default::default()
5312 },
5313 lsp::Diagnostic {
5314 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5315 severity: Some(DiagnosticSeverity::WARNING),
5316 message: "unreachable statement".to_string(),
5317 source: Some("disk".to_string()),
5318 ..Default::default()
5319 },
5320 ],
5321 },
5322 );
5323
5324 buffer.next_notification(cx).await;
5325 buffer.read_with(cx, |buffer, _| {
5326 assert_eq!(
5327 buffer
5328 .snapshot()
5329 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5330 .collect::<Vec<_>>(),
5331 &[
5332 DiagnosticEntry {
5333 range: Point::new(2, 9)..Point::new(2, 12),
5334 diagnostic: Diagnostic {
5335 severity: DiagnosticSeverity::WARNING,
5336 message: "unreachable statement".to_string(),
5337 is_disk_based: true,
5338 group_id: 1,
5339 is_primary: true,
5340 ..Default::default()
5341 }
5342 },
5343 DiagnosticEntry {
5344 range: Point::new(2, 9)..Point::new(2, 10),
5345 diagnostic: Diagnostic {
5346 severity: DiagnosticSeverity::ERROR,
5347 message: "undefined variable 'A'".to_string(),
5348 is_disk_based: true,
5349 group_id: 0,
5350 is_primary: true,
5351 ..Default::default()
5352 },
5353 }
5354 ]
5355 );
5356 assert_eq!(
5357 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5358 [
5359 ("fn a() { ".to_string(), None),
5360 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5361 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5362 ("\n".to_string(), None),
5363 ]
5364 );
5365 assert_eq!(
5366 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5367 [
5368 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5369 ("\n".to_string(), None),
5370 ]
5371 );
5372 });
5373
5374 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5375 // changes since the last save.
5376 buffer.update(cx, |buffer, cx| {
5377 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5378 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5379 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5380 });
5381 let change_notification_2 = fake_server
5382 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5383 .await;
5384 assert!(
5385 change_notification_2.text_document.version
5386 > change_notification_1.text_document.version
5387 );
5388
5389 // Handle out-of-order diagnostics
5390 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5391 lsp::PublishDiagnosticsParams {
5392 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5393 version: Some(change_notification_2.text_document.version),
5394 diagnostics: vec![
5395 lsp::Diagnostic {
5396 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5397 severity: Some(DiagnosticSeverity::ERROR),
5398 message: "undefined variable 'BB'".to_string(),
5399 source: Some("disk".to_string()),
5400 ..Default::default()
5401 },
5402 lsp::Diagnostic {
5403 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5404 severity: Some(DiagnosticSeverity::WARNING),
5405 message: "undefined variable 'A'".to_string(),
5406 source: Some("disk".to_string()),
5407 ..Default::default()
5408 },
5409 ],
5410 },
5411 );
5412
5413 buffer.next_notification(cx).await;
5414 buffer.read_with(cx, |buffer, _| {
5415 assert_eq!(
5416 buffer
5417 .snapshot()
5418 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5419 .collect::<Vec<_>>(),
5420 &[
5421 DiagnosticEntry {
5422 range: Point::new(2, 21)..Point::new(2, 22),
5423 diagnostic: Diagnostic {
5424 severity: DiagnosticSeverity::WARNING,
5425 message: "undefined variable 'A'".to_string(),
5426 is_disk_based: true,
5427 group_id: 1,
5428 is_primary: true,
5429 ..Default::default()
5430 }
5431 },
5432 DiagnosticEntry {
5433 range: Point::new(3, 9)..Point::new(3, 14),
5434 diagnostic: Diagnostic {
5435 severity: DiagnosticSeverity::ERROR,
5436 message: "undefined variable 'BB'".to_string(),
5437 is_disk_based: true,
5438 group_id: 0,
5439 is_primary: true,
5440 ..Default::default()
5441 },
5442 }
5443 ]
5444 );
5445 });
5446 }
5447
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered when chunking a
    // buffer: they must be widened to cover an adjacent character so the
    // diagnostic remains visible in the highlighted output.
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, cx);
    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Report two empty-range diagnostics: one mid-line (before the `;`) and
    // one at the very end of a line.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
5526
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that edits a language server computed against an *older*
    // document version are transformed so they apply cleanly to the buffer's
    // current contents, preserving the user's concurrent edits.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the server's edits below are expressed in that version's coordinates.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Ask the project to interpret the server's stale edits relative to the
    // buffer's current contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits yields both the server's changes and the
    // user's interleaved edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
5698
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a sprawling, overlapping set of LSP edits that amounts to
    // a small change is minimized down to just the spans that actually differ.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    let worktree_id = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir", true, cx)
        })
        .await
        .unwrap()
        .0
        .read_with(cx, |tree, _| tree.id());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_id, "a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above collapse into just two minimal
        // edits: merge the use statement and remove the now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
5828
5829 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5830 buffer: &Buffer,
5831 range: Range<T>,
5832 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5833 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5834 for chunk in buffer.snapshot().chunks(range, true) {
5835 if chunks.last().map_or(false, |prev_chunk| {
5836 prev_chunk.1 == chunk.diagnostic_severity
5837 }) {
5838 chunks.last_mut().unwrap().0.push_str(chunk.text);
5839 } else {
5840 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5841 }
5842 }
5843 chunks
5844 }
5845
5846 #[gpui::test]
5847 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5848 let dir = temp_tree(json!({
5849 "root": {
5850 "dir1": {},
5851 "dir2": {
5852 "dir3": {}
5853 }
5854 }
5855 }));
5856
5857 let project = Project::test(Arc::new(RealFs), cx);
5858 let (tree, _) = project
5859 .update(cx, |project, cx| {
5860 project.find_or_create_local_worktree(&dir.path(), true, cx)
5861 })
5862 .await
5863 .unwrap();
5864
5865 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5866 .await;
5867
5868 let cancel_flag = Default::default();
5869 let results = project
5870 .read_with(cx, |project, cx| {
5871 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5872 })
5873 .await;
5874
5875 assert!(results.is_empty());
5876 }
5877
#[gpui::test]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target file is opened in a
    // separate, non-visible worktree, which is released once the last handle
    // to the definition's buffer is dropped.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, cx);
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    // Only b.rs is opened in a (visible, single-file) worktree.
    let (tree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/dir/b.rs", true, cx)
        })
        .await
        .unwrap();
    let worktree_id = tree.read_with(cx, |tree, _| tree.id());
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;

    // A single-file worktree's root buffer is addressed by the empty path.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: Path::new("").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();

    // The fake server reports a definition in a.rs, a file that is not part
    // of any worktree yet.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
            lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    cx.read(|cx| {
        // Dropping the definition released the invisible worktree for a.rs.
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: every worktree in the project as (absolute path, is_visible).
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
5988
5989 #[gpui::test]
5990 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5991 let fs = FakeFs::new(cx.background());
5992 fs.insert_tree(
5993 "/dir",
5994 json!({
5995 "file1": "the old contents",
5996 }),
5997 )
5998 .await;
5999
6000 let project = Project::test(fs.clone(), cx);
6001 let worktree_id = project
6002 .update(cx, |p, cx| {
6003 p.find_or_create_local_worktree("/dir", true, cx)
6004 })
6005 .await
6006 .unwrap()
6007 .0
6008 .read_with(cx, |tree, _| tree.id());
6009
6010 let buffer = project
6011 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6012 .await
6013 .unwrap();
6014 buffer
6015 .update(cx, |buffer, cx| {
6016 assert_eq!(buffer.text(), "the old contents");
6017 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6018 buffer.save(cx)
6019 })
6020 .await
6021 .unwrap();
6022
6023 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6024 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6025 }
6026
6027 #[gpui::test]
6028 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6029 let fs = FakeFs::new(cx.background());
6030 fs.insert_tree(
6031 "/dir",
6032 json!({
6033 "file1": "the old contents",
6034 }),
6035 )
6036 .await;
6037
6038 let project = Project::test(fs.clone(), cx);
6039 let worktree_id = project
6040 .update(cx, |p, cx| {
6041 p.find_or_create_local_worktree("/dir/file1", true, cx)
6042 })
6043 .await
6044 .unwrap()
6045 .0
6046 .read_with(cx, |tree, _| tree.id());
6047
6048 let buffer = project
6049 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6050 .await
6051 .unwrap();
6052 buffer
6053 .update(cx, |buffer, cx| {
6054 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6055 buffer.save(cx)
6056 })
6057 .await
6058 .unwrap();
6059
6060 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6061 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6062 }
6063
6064 #[gpui::test]
6065 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6066 let fs = FakeFs::new(cx.background());
6067 fs.insert_tree("/dir", json!({})).await;
6068
6069 let project = Project::test(fs.clone(), cx);
6070 let (worktree, _) = project
6071 .update(cx, |project, cx| {
6072 project.find_or_create_local_worktree("/dir", true, cx)
6073 })
6074 .await
6075 .unwrap();
6076 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6077
6078 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6079 buffer.update(cx, |buffer, cx| {
6080 buffer.edit([0..0], "abc", cx);
6081 assert!(buffer.is_dirty());
6082 assert!(!buffer.has_conflict());
6083 });
6084 project
6085 .update(cx, |project, cx| {
6086 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6087 })
6088 .await
6089 .unwrap();
6090 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6091 buffer.read_with(cx, |buffer, cx| {
6092 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6093 assert!(!buffer.is_dirty());
6094 assert!(!buffer.has_conflict());
6095 });
6096
6097 let opened_buffer = project
6098 .update(cx, |project, cx| {
6099 project.open_buffer((worktree_id, "file1"), cx)
6100 })
6101 .await
6102 .unwrap();
6103 assert_eq!(opened_buffer, buffer);
6104 }
6105
6106 #[gpui::test(retries = 5)]
6107 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6108 let dir = temp_tree(json!({
6109 "a": {
6110 "file1": "",
6111 "file2": "",
6112 "file3": "",
6113 },
6114 "b": {
6115 "c": {
6116 "file4": "",
6117 "file5": "",
6118 }
6119 }
6120 }));
6121
6122 let project = Project::test(Arc::new(RealFs), cx);
6123 let rpc = project.read_with(cx, |p, _| p.client.clone());
6124
6125 let (tree, _) = project
6126 .update(cx, |p, cx| {
6127 p.find_or_create_local_worktree(dir.path(), true, cx)
6128 })
6129 .await
6130 .unwrap();
6131 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6132
6133 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6134 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6135 async move { buffer.await.unwrap() }
6136 };
6137 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6138 tree.read_with(cx, |tree, _| {
6139 tree.entry_for_path(path)
6140 .expect(&format!("no entry for path {}", path))
6141 .id
6142 })
6143 };
6144
6145 let buffer2 = buffer_for_path("a/file2", cx).await;
6146 let buffer3 = buffer_for_path("a/file3", cx).await;
6147 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6148 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6149
6150 let file2_id = id_for_path("a/file2", &cx);
6151 let file3_id = id_for_path("a/file3", &cx);
6152 let file4_id = id_for_path("b/c/file4", &cx);
6153
6154 // Wait for the initial scan.
6155 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6156 .await;
6157
6158 // Create a remote copy of this worktree.
6159 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6160 let (remote, load_task) = cx.update(|cx| {
6161 Worktree::remote(
6162 1,
6163 1,
6164 initial_snapshot.to_proto(&Default::default(), true),
6165 rpc.clone(),
6166 cx,
6167 )
6168 });
6169 load_task.await;
6170
6171 cx.read(|cx| {
6172 assert!(!buffer2.read(cx).is_dirty());
6173 assert!(!buffer3.read(cx).is_dirty());
6174 assert!(!buffer4.read(cx).is_dirty());
6175 assert!(!buffer5.read(cx).is_dirty());
6176 });
6177
6178 // Rename and delete files and directories.
6179 tree.flush_fs_events(&cx).await;
6180 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6181 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6182 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6183 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6184 tree.flush_fs_events(&cx).await;
6185
6186 let expected_paths = vec![
6187 "a",
6188 "a/file1",
6189 "a/file2.new",
6190 "b",
6191 "d",
6192 "d/file3",
6193 "d/file4",
6194 ];
6195
6196 cx.read(|app| {
6197 assert_eq!(
6198 tree.read(app)
6199 .paths()
6200 .map(|p| p.to_str().unwrap())
6201 .collect::<Vec<_>>(),
6202 expected_paths
6203 );
6204
6205 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6206 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6207 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6208
6209 assert_eq!(
6210 buffer2.read(app).file().unwrap().path().as_ref(),
6211 Path::new("a/file2.new")
6212 );
6213 assert_eq!(
6214 buffer3.read(app).file().unwrap().path().as_ref(),
6215 Path::new("d/file3")
6216 );
6217 assert_eq!(
6218 buffer4.read(app).file().unwrap().path().as_ref(),
6219 Path::new("d/file4")
6220 );
6221 assert_eq!(
6222 buffer5.read(app).file().unwrap().path().as_ref(),
6223 Path::new("b/c/file5")
6224 );
6225
6226 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6227 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6228 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6229 assert!(buffer5.read(app).file().unwrap().is_deleted());
6230 });
6231
6232 // Update the remote worktree. Check that it becomes consistent with the
6233 // local worktree.
6234 remote.update(cx, |remote, cx| {
6235 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6236 &initial_snapshot,
6237 1,
6238 1,
6239 true,
6240 );
6241 remote
6242 .as_remote_mut()
6243 .unwrap()
6244 .snapshot
6245 .apply_remote_update(update_message)
6246 .unwrap();
6247
6248 assert_eq!(
6249 remote
6250 .paths()
6251 .map(|p| p.to_str().unwrap())
6252 .collect::<Vec<_>>(),
6253 expected_paths
6254 );
6255 });
6256 }
6257
6258 #[gpui::test]
6259 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6260 let fs = FakeFs::new(cx.background());
6261 fs.insert_tree(
6262 "/the-dir",
6263 json!({
6264 "a.txt": "a-contents",
6265 "b.txt": "b-contents",
6266 }),
6267 )
6268 .await;
6269
6270 let project = Project::test(fs.clone(), cx);
6271 let worktree_id = project
6272 .update(cx, |p, cx| {
6273 p.find_or_create_local_worktree("/the-dir", true, cx)
6274 })
6275 .await
6276 .unwrap()
6277 .0
6278 .read_with(cx, |tree, _| tree.id());
6279
6280 // Spawn multiple tasks to open paths, repeating some paths.
6281 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6282 (
6283 p.open_buffer((worktree_id, "a.txt"), cx),
6284 p.open_buffer((worktree_id, "b.txt"), cx),
6285 p.open_buffer((worktree_id, "a.txt"), cx),
6286 )
6287 });
6288
6289 let buffer_a_1 = buffer_a_1.await.unwrap();
6290 let buffer_a_2 = buffer_a_2.await.unwrap();
6291 let buffer_b = buffer_b.await.unwrap();
6292 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6293 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6294
6295 // There is only one buffer per path.
6296 let buffer_a_id = buffer_a_1.id();
6297 assert_eq!(buffer_a_2.id(), buffer_a_id);
6298
6299 // Open the same path again while it is still open.
6300 drop(buffer_a_1);
6301 let buffer_a_3 = project
6302 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6303 .await
6304 .unwrap();
6305
6306 // There's still only one buffer per path.
6307 assert_eq!(buffer_a_3.id(), buffer_a_id);
6308 }
6309
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer's dirty-state lifecycle and the events it emits:
    // Dirtied on first edit, Saved after saving, and how deletion of the
    // underlying file interacts with dirtiness.
    use std::fs;

    let dir = temp_tree(json!({
        "file1": "abc",
        "file2": "def",
        "file3": "ghi",
    }));

    let project = Project::test(Arc::new(RealFs), cx);
    let (worktree, _) = project
        .update(cx, |p, cx| {
            p.find_or_create_local_worktree(dir.path(), true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

    worktree.flush_fs_events(&cx).await;
    worktree
        .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
        .await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
        .await
        .unwrap();
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every non-Operation event the buffer emits.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit(vec![1..2], "", cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::Dirtied]
        );
        events.borrow_mut().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit(vec![1..1], "B", cx);
        buffer.edit(vec![2..2], "D", cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::Dirtied,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // TODO - currently, after restoring the buffer to its
        // previously-saved state, the buffer is still considered dirty.
        buffer.edit([1..3], "", cx);
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
    });

    assert_eq!(*events.borrow(), &[language::Event::Edited]);

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs::remove_file(dir.path().join("file2")).unwrap();
    buffer2.condition(&cx, |b, _| b.is_dirty()).await;
    assert_eq!(
        *events.borrow(),
        &[language::Event::Dirtied, language::Event::FileHandleChanged]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    worktree.flush_fs_events(&cx).await;
    // Dirty the buffer first, then delete its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit(Some(0..0), "x", cx);
    });
    events.borrow_mut().clear();
    fs::remove_file(dir.path().join("file3")).unwrap();
    buffer3
        .condition(&cx, |_, _| !events.borrow().is_empty())
        .await;
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
6449
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean
    // buffer is reloaded via a diff against the new contents, while a dirty
    // buffer is left alone and flagged as conflicted.
    use std::fs;

    let initial_contents = "aaa\nbbbbb\nc\n";
    let dir = temp_tree(json!({ "the-file": initial_contents }));

    let project = Project::test(Arc::new(RealFs), cx);
    let (worktree, _) = project
        .update(cx, |p, cx| {
            p.find_or_create_local_worktree(dir.path(), true, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    worktree
        .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
        .await;

    let abs_path = dir.path().join("the-file");
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
        .await
        .unwrap();

    // TODO
    // Add a cursor on each row.
    // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
    //     assert!(!buffer.is_dirty());
    //     buffer.add_selection_set(
    //         &(0..3)
    //             .map(|row| Selection {
    //                 id: row as usize,
    //                 start: Point::new(row, 1),
    //                 end: Point::new(row, 1),
    //                 reversed: false,
    //                 goal: SelectionGoal::None,
    //             })
    //             .collect::<Vec<_>>(),
    //         cx,
    //     )
    // });

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs::write(&abs_path, new_contents).unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    buffer
        .condition(&cx, |buffer, _| buffer.text() == new_contents)
        .await;

    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        // A disk-initiated reload leaves the buffer clean and conflict-free.
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // TODO
        // let cursor_positions = buffer
        //     .selection_set(selection_set_id)
        //     .unwrap()
        //     .selections::<Point>(&*buffer)
        //     .map(|selection| {
        //         assert_eq!(selection.start, selection.end);
        //         selection.start
        //     })
        //     .collect::<Vec<_>>();
        // assert_eq!(
        //     cursor_positions,
        //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
        // );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![0..0], " ", cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    buffer
        .condition(&cx, |buffer, _| buffer.has_conflict())
        .await;
}
6547
    // Publishes LSP diagnostics where hint diagnostics point back at their
    // primary error via `relatedInformation`, and verifies (per the expected
    // values below) that each primary diagnostic and its hints end up sharing
    // a `group_id`, with `is_primary` set only on the primary entry.
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Two "errors" (a warning and an error), each followed by HINT
        // diagnostics whose relatedInformation links them to the primary.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                // Hint belonging to "error 1" (points back at it).
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Primary "error 2", which itself references two hints.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                // The two hints belonging to "error 2".
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        // Feed the diagnostics to the project, then snapshot the buffer to
        // query what was stored.
        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics, in buffer order: group 0 is "error 1" + its hint,
        // group 1 is "error 2" + its two hints. Only the originals are
        // `is_primary`.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Each group can also be fetched by its id.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
6806
    // End-to-end rename flow against a fake language server:
    // `prepare_rename` resolves the range of the symbol under the cursor, and
    // `perform_rename` applies a multi-file WorkspaceEdit returned by the
    // server, touching both `one.rs` and `two.rs`.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a .rs buffer triggers the fake server to start.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // Issue the prepare-rename request first, then have the fake server
        // answer it with the range of `ONE` (offsets 6..9).
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Now perform the rename to "THREE"; the fake server replies with a
        // WorkspaceEdit spanning both files (one edit in one.rs, two in
        // two.rs).
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The resulting transaction covers both buffers; check each buffer's
        // final text. The entry for the already-open `one.rs` buffer is
        // removed first, leaving only the `two.rs` buffer.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
6947
6948 #[gpui::test]
6949 async fn test_search(cx: &mut gpui::TestAppContext) {
6950 let fs = FakeFs::new(cx.background());
6951 fs.insert_tree(
6952 "/dir",
6953 json!({
6954 "one.rs": "const ONE: usize = 1;",
6955 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6956 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6957 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6958 }),
6959 )
6960 .await;
6961 let project = Project::test(fs.clone(), cx);
6962 let (tree, _) = project
6963 .update(cx, |project, cx| {
6964 project.find_or_create_local_worktree("/dir", true, cx)
6965 })
6966 .await
6967 .unwrap();
6968 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6969 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6970 .await;
6971
6972 assert_eq!(
6973 search(&project, SearchQuery::text("TWO", false, true), cx)
6974 .await
6975 .unwrap(),
6976 HashMap::from_iter([
6977 ("two.rs".to_string(), vec![6..9]),
6978 ("three.rs".to_string(), vec![37..40])
6979 ])
6980 );
6981
6982 let buffer_4 = project
6983 .update(cx, |project, cx| {
6984 project.open_buffer((worktree_id, "four.rs"), cx)
6985 })
6986 .await
6987 .unwrap();
6988 buffer_4.update(cx, |buffer, cx| {
6989 buffer.edit([20..28, 31..43], "two::TWO", cx);
6990 });
6991
6992 assert_eq!(
6993 search(&project, SearchQuery::text("TWO", false, true), cx)
6994 .await
6995 .unwrap(),
6996 HashMap::from_iter([
6997 ("two.rs".to_string(), vec![6..9]),
6998 ("three.rs".to_string(), vec![37..40]),
6999 ("four.rs".to_string(), vec![25..28, 36..39])
7000 ])
7001 );
7002
7003 async fn search(
7004 project: &ModelHandle<Project>,
7005 query: SearchQuery,
7006 cx: &mut gpui::TestAppContext,
7007 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7008 let results = project
7009 .update(cx, |project, cx| project.search(query, cx))
7010 .await?;
7011
7012 Ok(results
7013 .into_iter()
7014 .map(|(buffer, ranges)| {
7015 buffer.read_with(cx, |buffer, _| {
7016 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7017 let ranges = ranges
7018 .into_iter()
7019 .map(|range| range.to_offset(buffer))
7020 .collect::<Vec<_>>();
7021 (path, ranges)
7022 })
7023 })
7024 .collect())
7025 }
7026 }
7027}