1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
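/// State for a single open project.
///
/// A project owns a set of worktrees, the buffers opened within them, the
/// collaborators joined over RPC, and any language servers started on its
/// behalf. A project is either local (backed by this machine's file system)
/// or remote (a replica of a project shared by another client).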
52pub struct Project {
53 worktrees: Vec<WorktreeHandle>,
54 active_entry: Option<ProjectEntry>,
55 languages: Arc<LanguageRegistry>,
56 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
57 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
58 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
59 language_server_settings: Arc<Mutex<serde_json::Value>>,
60 next_language_server_id: usize,
61 client: Arc<client::Client>,
62 next_entry_id: Arc<AtomicUsize>,
63 user_store: ModelHandle<UserStore>,
64 fs: Arc<dyn Fs>,
65 client_state: ProjectClientState,
66 collaborators: HashMap<PeerId, Collaborator>,
67 subscriptions: Vec<client::Subscription>,
68 language_servers_with_diagnostics_running: isize,
69 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
70 shared_buffers: HashMap<PeerId, HashSet<u64>>,
71 loading_buffers: HashMap<
72 ProjectPath,
73 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
74 >,
75 loading_local_worktrees:
76 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
77 opened_buffers: HashMap<u64, OpenBuffer>,
78 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
79 nonce: u128,
80}
81
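/// A buffer tracked by the project. Buffers are held strongly while the
/// project is shared or remote and weakly otherwise; `Loading` buffers any
/// operations that arrive before the buffer itself has finished opening.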
82enum OpenBuffer {
83 Strong(ModelHandle<Buffer>),
84 Weak(WeakModelHandle<Buffer>),
85 Loading(Vec<Operation>),
86}
87
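/// A handle to a worktree. Handles are upgraded to strong references while the
/// project is shared; when sharing stops, non-visible worktrees are downgraded
/// back to weak handles.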
88enum WorktreeHandle {
89 Strong(ModelHandle<Worktree>),
90 Weak(WeakModelHandle<Worktree>),
91}
92
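/// Whether this project is the local authority or a remote replica, along with
/// the state needed to keep it registered with the server or to detect that
/// the host has stopped sharing it.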
93enum ProjectClientState {
94 Local {
95 is_shared: bool,
96 remote_id_tx: watch::Sender<Option<u64>>,
97 remote_id_rx: watch::Receiver<Option<u64>>,
98 _maintain_remote_id_task: Task<Option<()>>,
99 },
100 Remote {
101 sharing_has_stopped: bool,
102 remote_id: u64,
103 replica_id: ReplicaId,
104 _detect_unshare_task: Task<Option<()>>,
105 },
106}
107
108#[derive(Clone, Debug)]
109pub struct Collaborator {
110 pub user: Arc<User>,
111 pub peer_id: PeerId,
112 pub replica_id: ReplicaId,
113}
114
115#[derive(Clone, Debug, PartialEq)]
116pub enum Event {
117 ActiveEntryChanged(Option<ProjectEntry>),
118 WorktreeRemoved(WorktreeId),
119 DiskBasedDiagnosticsStarted,
120 DiskBasedDiagnosticsUpdated,
121 DiskBasedDiagnosticsFinished,
122 DiagnosticsUpdated(ProjectPath),
123}
124
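/// Events forwarded from a language server's notification callbacks to the
/// project, where they are processed on the main thread by `on_lsp_event`.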
125enum LanguageServerEvent {
126 WorkStart {
127 token: String,
128 },
129 WorkProgress {
130 token: String,
131 progress: LanguageServerProgress,
132 },
133 WorkEnd {
134 token: String,
135 },
136 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
137}
138
139pub struct LanguageServerStatus {
140 pub name: String,
141 pub pending_work: BTreeMap<String, LanguageServerProgress>,
142 pending_diagnostic_updates: isize,
143}
144
145#[derive(Clone, Debug)]
146pub struct LanguageServerProgress {
147 pub message: Option<String>,
148 pub percentage: Option<usize>,
149 pub last_update_at: Instant,
150}
151
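/// A path to an entry, identified by the worktree it belongs to and its
/// worktree-relative path.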
152#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
153pub struct ProjectPath {
154 pub worktree_id: WorktreeId,
155 pub path: Arc<Path>,
156}
157
158#[derive(Clone, Debug, Default, PartialEq)]
159pub struct DiagnosticSummary {
160 pub error_count: usize,
161 pub warning_count: usize,
162 pub info_count: usize,
163 pub hint_count: usize,
164}
165
166#[derive(Debug)]
167pub struct Location {
168 pub buffer: ModelHandle<Buffer>,
169 pub range: Range<language::Anchor>,
170}
171
172#[derive(Debug)]
173pub struct DocumentHighlight {
174 pub range: Range<language::Anchor>,
175 pub kind: DocumentHighlightKind,
176}
177
178#[derive(Clone, Debug)]
179pub struct Symbol {
180 pub source_worktree_id: WorktreeId,
181 pub worktree_id: WorktreeId,
182 pub language_name: String,
183 pub path: PathBuf,
184 pub label: CodeLabel,
185 pub name: String,
186 pub kind: lsp::SymbolKind,
187 pub range: Range<PointUtf16>,
188 pub signature: [u8; 32],
189}
190
191#[derive(Default)]
192pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
193
194impl DiagnosticSummary {
195 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
196 let mut this = Self {
197 error_count: 0,
198 warning_count: 0,
199 info_count: 0,
200 hint_count: 0,
201 };
202
203 for entry in diagnostics {
204 if entry.diagnostic.is_primary {
205 match entry.diagnostic.severity {
206 DiagnosticSeverity::ERROR => this.error_count += 1,
207 DiagnosticSeverity::WARNING => this.warning_count += 1,
208 DiagnosticSeverity::INFORMATION => this.info_count += 1,
209 DiagnosticSeverity::HINT => this.hint_count += 1,
210 _ => {}
211 }
212 }
213 }
214
215 this
216 }
217
218 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
219 proto::DiagnosticSummary {
220 path: path.to_string_lossy().to_string(),
221 error_count: self.error_count as u32,
222 warning_count: self.warning_count as u32,
223 info_count: self.info_count as u32,
224 hint_count: self.hint_count as u32,
225 }
226 }
227}
228
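/// Identifies a single worktree entry by its worktree id and entry id.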
229#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
230pub struct ProjectEntry {
231 pub worktree_id: WorktreeId,
232 pub entry_id: usize,
233}
234
235impl Project {
236 pub fn init(client: &Arc<Client>) {
237 client.add_entity_message_handler(Self::handle_add_collaborator);
238 client.add_entity_message_handler(Self::handle_buffer_reloaded);
239 client.add_entity_message_handler(Self::handle_buffer_saved);
240 client.add_entity_message_handler(Self::handle_start_language_server);
241 client.add_entity_message_handler(Self::handle_update_language_server);
242 client.add_entity_message_handler(Self::handle_remove_collaborator);
243 client.add_entity_message_handler(Self::handle_register_worktree);
244 client.add_entity_message_handler(Self::handle_unregister_worktree);
245 client.add_entity_message_handler(Self::handle_unshare_project);
246 client.add_entity_message_handler(Self::handle_update_buffer_file);
247 client.add_entity_message_handler(Self::handle_update_buffer);
248 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
249 client.add_entity_message_handler(Self::handle_update_worktree);
250 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
251 client.add_entity_request_handler(Self::handle_apply_code_action);
252 client.add_entity_request_handler(Self::handle_format_buffers);
253 client.add_entity_request_handler(Self::handle_get_code_actions);
254 client.add_entity_request_handler(Self::handle_get_completions);
255 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
256 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
257 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
258 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
259 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
260 client.add_entity_request_handler(Self::handle_search_project);
261 client.add_entity_request_handler(Self::handle_get_project_symbols);
262 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
263 client.add_entity_request_handler(Self::handle_open_buffer);
264 client.add_entity_request_handler(Self::handle_save_buffer);
265 }
266
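    /// Creates a local project. A background task keeps the project registered
    /// with the server whenever the client is connected, re-registering every
    /// worktree under the project id returned by the server.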
267 pub fn local(
268 client: Arc<Client>,
269 user_store: ModelHandle<UserStore>,
270 languages: Arc<LanguageRegistry>,
271 fs: Arc<dyn Fs>,
272 cx: &mut MutableAppContext,
273 ) -> ModelHandle<Self> {
274 cx.add_model(|cx: &mut ModelContext<Self>| {
275 let (remote_id_tx, remote_id_rx) = watch::channel();
276 let _maintain_remote_id_task = cx.spawn_weak({
277 let rpc = client.clone();
278 move |this, mut cx| {
279 async move {
280 let mut status = rpc.status();
281 while let Some(status) = status.next().await {
282 if let Some(this) = this.upgrade(&cx) {
283 let remote_id = if status.is_connected() {
284 let response = rpc.request(proto::RegisterProject {}).await?;
285 Some(response.project_id)
286 } else {
287 None
288 };
289
290 if let Some(project_id) = remote_id {
291 let mut registrations = Vec::new();
292 this.update(&mut cx, |this, cx| {
293 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
294 registrations.push(worktree.update(
295 cx,
296 |worktree, cx| {
297 let worktree = worktree.as_local_mut().unwrap();
298 worktree.register(project_id, cx)
299 },
300 ));
301 }
302 });
303 for registration in registrations {
304 registration.await?;
305 }
306 }
307 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
308 }
309 }
310 Ok(())
311 }
312 .log_err()
313 }
314 });
315
316 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
317 Self {
318 worktrees: Default::default(),
319 collaborators: Default::default(),
320 opened_buffers: Default::default(),
321 shared_buffers: Default::default(),
322 loading_buffers: Default::default(),
323 loading_local_worktrees: Default::default(),
324 buffer_snapshots: Default::default(),
325 client_state: ProjectClientState::Local {
326 is_shared: false,
327 remote_id_tx,
328 remote_id_rx,
329 _maintain_remote_id_task,
330 },
331 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
332 subscriptions: Vec::new(),
333 active_entry: None,
334 languages,
335 client,
336 user_store,
337 fs,
338 next_entry_id: Default::default(),
339 language_servers_with_diagnostics_running: 0,
340 language_servers: Default::default(),
341 started_language_servers: Default::default(),
342 language_server_statuses: Default::default(),
343 language_server_settings: Default::default(),
344 next_language_server_id: 0,
345 nonce: StdRng::from_entropy().gen(),
346 }
347 })
348 }
349
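    /// Joins a project shared by another client, replicating its worktrees,
    /// collaborators, and language server statuses locally and watching for
    /// the host to stop sharing or disconnect.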
350 pub async fn remote(
351 remote_id: u64,
352 client: Arc<Client>,
353 user_store: ModelHandle<UserStore>,
354 languages: Arc<LanguageRegistry>,
355 fs: Arc<dyn Fs>,
356 cx: &mut AsyncAppContext,
357 ) -> Result<ModelHandle<Self>> {
358 client.authenticate_and_connect(&cx).await?;
359
360 let response = client
361 .request(proto::JoinProject {
362 project_id: remote_id,
363 })
364 .await?;
365
366 let replica_id = response.replica_id as ReplicaId;
367
368 let mut worktrees = Vec::new();
369 for worktree in response.worktrees {
370 let (worktree, load_task) = cx
371 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
372 worktrees.push(worktree);
373 load_task.detach();
374 }
375
376 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
377 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
378 let mut this = Self {
379 worktrees: Vec::new(),
380 loading_buffers: Default::default(),
381 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
382 shared_buffers: Default::default(),
383 loading_local_worktrees: Default::default(),
384 active_entry: None,
385 collaborators: Default::default(),
386 languages,
387 user_store: user_store.clone(),
388 fs,
389 next_entry_id: Default::default(),
390 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
391 client: client.clone(),
392 client_state: ProjectClientState::Remote {
393 sharing_has_stopped: false,
394 remote_id,
395 replica_id,
396 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
397 async move {
398 let mut status = client.status();
399 let is_connected =
400 status.next().await.map_or(false, |s| s.is_connected());
401 // Even if we're initially connected, any later status change means the connection was momentarily lost.
402 if !is_connected || status.next().await.is_some() {
403 if let Some(this) = this.upgrade(&cx) {
404 this.update(&mut cx, |this, cx| this.project_unshared(cx))
405 }
406 }
407 Ok(())
408 }
409 .log_err()
410 }),
411 },
412 language_servers_with_diagnostics_running: 0,
413 language_servers: Default::default(),
414 started_language_servers: Default::default(),
415 language_server_settings: Default::default(),
416 language_server_statuses: response
417 .language_servers
418 .into_iter()
419 .map(|server| {
420 (
421 server.id as usize,
422 LanguageServerStatus {
423 name: server.name,
424 pending_work: Default::default(),
425 pending_diagnostic_updates: 0,
426 },
427 )
428 })
429 .collect(),
430 next_language_server_id: 0,
431 opened_buffers: Default::default(),
432 buffer_snapshots: Default::default(),
433 nonce: StdRng::from_entropy().gen(),
434 };
435 for worktree in worktrees {
436 this.add_worktree(&worktree, cx);
437 }
438 this
439 });
440
441 let user_ids = response
442 .collaborators
443 .iter()
444 .map(|peer| peer.user_id)
445 .collect();
446 user_store
447 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
448 .await?;
449 let mut collaborators = HashMap::default();
450 for message in response.collaborators {
451 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
452 collaborators.insert(collaborator.peer_id, collaborator);
453 }
454
455 this.update(cx, |this, _| {
456 this.collaborators = collaborators;
457 });
458
459 Ok(this)
460 }
461
462 #[cfg(any(test, feature = "test-support"))]
463 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
464 let languages = Arc::new(LanguageRegistry::test());
465 let http_client = client::test::FakeHttpClient::with_404_response();
466 let client = client::Client::new(http_client.clone());
467 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
468 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
469 }
470
471 #[cfg(any(test, feature = "test-support"))]
472 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
473 self.opened_buffers
474 .get(&remote_id)
475 .and_then(|buffer| buffer.upgrade(cx))
476 }
477
478 #[cfg(any(test, feature = "test-support"))]
479 pub fn languages(&self) -> &Arc<LanguageRegistry> {
480 &self.languages
481 }
482
483 #[cfg(any(test, feature = "test-support"))]
484 pub fn check_invariants(&self, cx: &AppContext) {
485 if self.is_local() {
486 let mut worktree_root_paths = HashMap::default();
487 for worktree in self.worktrees(cx) {
488 let worktree = worktree.read(cx);
489 let abs_path = worktree.as_local().unwrap().abs_path().clone();
490 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
491 assert_eq!(
492 prev_worktree_id,
493 None,
494 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
495 abs_path,
496 worktree.id(),
497 prev_worktree_id
498 )
499 }
500 } else {
501 let replica_id = self.replica_id();
502 for buffer in self.opened_buffers.values() {
503 if let Some(buffer) = buffer.upgrade(cx) {
504 let buffer = buffer.read(cx);
505 assert_eq!(
506 buffer.deferred_ops_len(),
507 0,
508 "replica {}, buffer {} has deferred operations",
509 replica_id,
510 buffer.remote_id()
511 );
512 }
513 }
514 }
515 }
516
517 #[cfg(any(test, feature = "test-support"))]
518 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
519 let path = path.into();
520 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
521 self.opened_buffers.iter().any(|(_, buffer)| {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
524 if file.worktree == worktree && file.path() == &path.path {
525 return true;
526 }
527 }
528 }
529 false
530 })
531 } else {
532 false
533 }
534 }
535
536 pub fn fs(&self) -> &Arc<dyn Fs> {
537 &self.fs
538 }
539
540 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
541 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
542 *remote_id_tx.borrow_mut() = remote_id;
543 }
544
545 self.subscriptions.clear();
546 if let Some(remote_id) = remote_id {
547 self.subscriptions
548 .push(self.client.add_model_for_remote_entity(remote_id, cx));
549 }
550 }
551
552 pub fn remote_id(&self) -> Option<u64> {
553 match &self.client_state {
554 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
555 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
556 }
557 }
558
559 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
560 let mut id = None;
561 let mut watch = None;
562 match &self.client_state {
563 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
564 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
565 }
566
567 async move {
568 if let Some(id) = id {
569 return id;
570 }
571 let mut watch = watch.unwrap();
572 loop {
573 let id = *watch.borrow();
574 if let Some(id) = id {
575 return id;
576 }
577 watch.next().await;
578 }
579 }
580 }
581
582 pub fn replica_id(&self) -> ReplicaId {
583 match &self.client_state {
584 ProjectClientState::Local { .. } => 0,
585 ProjectClientState::Remote { replica_id, .. } => *replica_id,
586 }
587 }
588
589 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
590 &self.collaborators
591 }
592
593 pub fn worktrees<'a>(
594 &'a self,
595 cx: &'a AppContext,
596 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
597 self.worktrees
598 .iter()
599 .filter_map(move |worktree| worktree.upgrade(cx))
600 }
601
602 pub fn visible_worktrees<'a>(
603 &'a self,
604 cx: &'a AppContext,
605 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
606 self.worktrees.iter().filter_map(|worktree| {
607 worktree.upgrade(cx).and_then(|worktree| {
608 if worktree.read(cx).is_visible() {
609 Some(worktree)
610 } else {
611 None
612 }
613 })
614 })
615 }
616
617 pub fn worktree_for_id(
618 &self,
619 id: WorktreeId,
620 cx: &AppContext,
621 ) -> Option<ModelHandle<Worktree>> {
622 self.worktrees(cx)
623 .find(|worktree| worktree.read(cx).id() == id)
624 }
625
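    /// Shares this local project. Open buffers and worktrees are upgraded to
    /// strong handles so they remain alive for guests, and every worktree is
    /// shared under the project's remote id.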
626 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
627 let rpc = self.client.clone();
628 cx.spawn(|this, mut cx| async move {
629 let project_id = this.update(&mut cx, |this, cx| {
630 if let ProjectClientState::Local {
631 is_shared,
632 remote_id_rx,
633 ..
634 } = &mut this.client_state
635 {
636 *is_shared = true;
637
638 for open_buffer in this.opened_buffers.values_mut() {
639 match open_buffer {
640 OpenBuffer::Strong(_) => {}
641 OpenBuffer::Weak(buffer) => {
642 if let Some(buffer) = buffer.upgrade(cx) {
643 *open_buffer = OpenBuffer::Strong(buffer);
644 }
645 }
646 OpenBuffer::Loading(_) => unreachable!(),
647 }
648 }
649
650 for worktree_handle in this.worktrees.iter_mut() {
651 match worktree_handle {
652 WorktreeHandle::Strong(_) => {}
653 WorktreeHandle::Weak(worktree) => {
654 if let Some(worktree) = worktree.upgrade(cx) {
655 *worktree_handle = WorktreeHandle::Strong(worktree);
656 }
657 }
658 }
659 }
660
661 remote_id_rx
662 .borrow()
663 .ok_or_else(|| anyhow!("no project id"))
664 } else {
665 Err(anyhow!("can't share a remote project"))
666 }
667 })?;
668
669 rpc.request(proto::ShareProject { project_id }).await?;
670
671 let mut tasks = Vec::new();
672 this.update(&mut cx, |this, cx| {
673 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
674 worktree.update(cx, |worktree, cx| {
675 let worktree = worktree.as_local_mut().unwrap();
676 tasks.push(worktree.share(project_id, cx));
677 });
678 }
679 });
680 for task in tasks {
681 task.await?;
682 }
683 this.update(&mut cx, |_, cx| cx.notify());
684 Ok(())
685 })
686 }
687
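    /// Stops sharing this local project: collaborators and shared buffers are
    /// cleared, buffer handles and non-visible worktree handles are downgraded
    /// to weak references, and the server is notified.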
688 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
689 let rpc = self.client.clone();
690 cx.spawn(|this, mut cx| async move {
691 let project_id = this.update(&mut cx, |this, cx| {
692 if let ProjectClientState::Local {
693 is_shared,
694 remote_id_rx,
695 ..
696 } = &mut this.client_state
697 {
698 *is_shared = false;
699
700 for open_buffer in this.opened_buffers.values_mut() {
701 match open_buffer {
702 OpenBuffer::Strong(buffer) => {
703 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
704 }
705 _ => {}
706 }
707 }
708
709 for worktree_handle in this.worktrees.iter_mut() {
710 match worktree_handle {
711 WorktreeHandle::Strong(worktree) => {
712 if !worktree.read(cx).is_visible() {
713 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
714 }
715 }
716 _ => {}
717 }
718 }
719
720 remote_id_rx
721 .borrow()
722 .ok_or_else(|| anyhow!("no project id"))
723 } else {
724 Err(anyhow!("can't share a remote project"))
725 }
726 })?;
727
728 rpc.send(proto::UnshareProject { project_id })?;
729 this.update(&mut cx, |this, cx| {
730 this.collaborators.clear();
731 this.shared_buffers.clear();
732 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
733 worktree.update(cx, |worktree, _| {
734 worktree.as_local_mut().unwrap().unshare();
735 });
736 }
737 cx.notify()
738 });
739 Ok(())
740 })
741 }
742
743 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
744 if let ProjectClientState::Remote {
745 sharing_has_stopped,
746 ..
747 } = &mut self.client_state
748 {
749 *sharing_has_stopped = true;
750 self.collaborators.clear();
751 cx.notify();
752 }
753 }
754
755 pub fn is_read_only(&self) -> bool {
756 match &self.client_state {
757 ProjectClientState::Local { .. } => false,
758 ProjectClientState::Remote {
759 sharing_has_stopped,
760 ..
761 } => *sharing_has_stopped,
762 }
763 }
764
765 pub fn is_local(&self) -> bool {
766 match &self.client_state {
767 ProjectClientState::Local { .. } => true,
768 ProjectClientState::Remote { .. } => false,
769 }
770 }
771
772 pub fn is_remote(&self) -> bool {
773 !self.is_local()
774 }
775
776 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
777 if self.is_remote() {
778 return Err(anyhow!("creating buffers as a guest is not supported yet"));
779 }
780
781 let buffer = cx.add_model(|cx| {
782 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
783 });
784 self.register_buffer(&buffer, cx)?;
785 Ok(buffer)
786 }
787
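    /// Opens the buffer at the given project path, returning it immediately if
    /// it is already open and deduplicating concurrent loads of the same path
    /// via `loading_buffers`. Local paths are loaded from disk; remote paths
    /// are requested from the host.
    ///
    /// A rough usage sketch (not compiled; `project`, `worktree_id`, and `cx`
    /// are assumed to be in scope):
    ///
    /// ```ignore
    /// let open = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/main.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// let buffer = open.await?;
    /// ```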
788 pub fn open_buffer(
789 &mut self,
790 path: impl Into<ProjectPath>,
791 cx: &mut ModelContext<Self>,
792 ) -> Task<Result<ModelHandle<Buffer>>> {
793 let project_path = path.into();
794 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
795 worktree
796 } else {
797 return Task::ready(Err(anyhow!("no such worktree")));
798 };
799
800 // If there is already a buffer for the given path, then return it.
801 let existing_buffer = self.get_open_buffer(&project_path, cx);
802 if let Some(existing_buffer) = existing_buffer {
803 return Task::ready(Ok(existing_buffer));
804 }
805
806 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
807 // If the given path is already being loaded, then wait for that existing
808 // task to complete and return the same buffer.
809 hash_map::Entry::Occupied(e) => e.get().clone(),
810
811 // Otherwise, record the fact that this path is now being loaded.
812 hash_map::Entry::Vacant(entry) => {
813 let (mut tx, rx) = postage::watch::channel();
814 entry.insert(rx.clone());
815
816 let load_buffer = if worktree.read(cx).is_local() {
817 self.open_local_buffer(&project_path.path, &worktree, cx)
818 } else {
819 self.open_remote_buffer(&project_path.path, &worktree, cx)
820 };
821
822 cx.spawn(move |this, mut cx| async move {
823 let load_result = load_buffer.await;
824 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
825 // Record the fact that the buffer is no longer loading.
826 this.loading_buffers.remove(&project_path);
827 let buffer = load_result.map_err(Arc::new)?;
828 Ok(buffer)
829 }));
830 })
831 .detach();
832 rx
833 }
834 };
835
836 cx.foreground().spawn(async move {
837 loop {
838 if let Some(result) = loading_watch.borrow().as_ref() {
839 match result {
840 Ok(buffer) => return Ok(buffer.clone()),
841 Err(error) => return Err(anyhow!("{}", error)),
842 }
843 }
844 loading_watch.next().await;
845 }
846 })
847 }
848
849 fn open_local_buffer(
850 &mut self,
851 path: &Arc<Path>,
852 worktree: &ModelHandle<Worktree>,
853 cx: &mut ModelContext<Self>,
854 ) -> Task<Result<ModelHandle<Buffer>>> {
855 let load_buffer = worktree.update(cx, |worktree, cx| {
856 let worktree = worktree.as_local_mut().unwrap();
857 worktree.load_buffer(path, cx)
858 });
859 cx.spawn(|this, mut cx| async move {
860 let buffer = load_buffer.await?;
861 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
862 Ok(buffer)
863 })
864 }
865
866 fn open_remote_buffer(
867 &mut self,
868 path: &Arc<Path>,
869 worktree: &ModelHandle<Worktree>,
870 cx: &mut ModelContext<Self>,
871 ) -> Task<Result<ModelHandle<Buffer>>> {
872 let rpc = self.client.clone();
873 let project_id = self.remote_id().unwrap();
874 let remote_worktree_id = worktree.read(cx).id();
875 let path = path.clone();
876 let path_string = path.to_string_lossy().to_string();
877 cx.spawn(|this, mut cx| async move {
878 let response = rpc
879 .request(proto::OpenBuffer {
880 project_id,
881 worktree_id: remote_worktree_id.to_proto(),
882 path: path_string,
883 })
884 .await?;
885 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
886 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
887 .await
888 })
889 }
890
891 fn open_local_buffer_via_lsp(
892 &mut self,
893 abs_path: lsp::Url,
894 lang_name: Arc<str>,
895 lang_server: Arc<LanguageServer>,
896 cx: &mut ModelContext<Self>,
897 ) -> Task<Result<ModelHandle<Buffer>>> {
898 cx.spawn(|this, mut cx| async move {
899 let abs_path = abs_path
900 .to_file_path()
901 .map_err(|_| anyhow!("can't convert URI to path"))?;
902 let (worktree, relative_path) = if let Some(result) =
903 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
904 {
905 result
906 } else {
907 let worktree = this
908 .update(&mut cx, |this, cx| {
909 this.create_local_worktree(&abs_path, false, cx)
910 })
911 .await?;
912 this.update(&mut cx, |this, cx| {
913 this.language_servers
914 .insert((worktree.read(cx).id(), lang_name), lang_server);
915 });
916 (worktree, PathBuf::new())
917 };
918
919 let project_path = ProjectPath {
920 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
921 path: relative_path.into(),
922 };
923 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
924 .await
925 })
926 }
927
928 pub fn save_buffer_as(
929 &mut self,
930 buffer: ModelHandle<Buffer>,
931 abs_path: PathBuf,
932 cx: &mut ModelContext<Project>,
933 ) -> Task<Result<()>> {
934 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
935 cx.spawn(|this, mut cx| async move {
936 let (worktree, path) = worktree_task.await?;
937 worktree
938 .update(&mut cx, |worktree, cx| {
939 worktree
940 .as_local_mut()
941 .unwrap()
942 .save_buffer_as(buffer.clone(), path, cx)
943 })
944 .await?;
945 this.update(&mut cx, |this, cx| {
946 this.assign_language_to_buffer(&buffer, cx);
947 this.register_buffer_with_language_server(&buffer, cx);
948 });
949 Ok(())
950 })
951 }
952
953 pub fn get_open_buffer(
954 &mut self,
955 path: &ProjectPath,
956 cx: &mut ModelContext<Self>,
957 ) -> Option<ModelHandle<Buffer>> {
958 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
959 self.opened_buffers.values().find_map(|buffer| {
960 let buffer = buffer.upgrade(cx)?;
961 let file = File::from_dyn(buffer.read(cx).file())?;
962 if file.worktree == worktree && file.path() == &path.path {
963 Some(buffer)
964 } else {
965 None
966 }
967 })
968 }
969
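    /// Records a newly opened buffer: applies any operations that were
    /// buffered while it loaded, rejects duplicate registrations, subscribes
    /// to its events, and hooks it up to language detection and the language
    /// server.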
970 fn register_buffer(
971 &mut self,
972 buffer: &ModelHandle<Buffer>,
973 cx: &mut ModelContext<Self>,
974 ) -> Result<()> {
975 let remote_id = buffer.read(cx).remote_id();
976 let open_buffer = if self.is_remote() || self.is_shared() {
977 OpenBuffer::Strong(buffer.clone())
978 } else {
979 OpenBuffer::Weak(buffer.downgrade())
980 };
981
982 match self.opened_buffers.insert(remote_id, open_buffer) {
983 None => {}
984 Some(OpenBuffer::Loading(operations)) => {
985 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
986 }
987 Some(OpenBuffer::Weak(existing_handle)) => {
988 if existing_handle.upgrade(cx).is_some() {
989 Err(anyhow!(
990 "already registered buffer with remote id {}",
991 remote_id
992 ))?
993 }
994 }
995 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
996 "already registered buffer with remote id {}",
997 remote_id
998 ))?,
999 }
1000 cx.subscribe(buffer, |this, buffer, event, cx| {
1001 this.on_buffer_event(buffer, event, cx);
1002 })
1003 .detach();
1004
1005 self.assign_language_to_buffer(buffer, cx);
1006 self.register_buffer_with_language_server(buffer, cx);
1007
1008 Ok(())
1009 }
1010
1011 fn register_buffer_with_language_server(
1012 &mut self,
1013 buffer_handle: &ModelHandle<Buffer>,
1014 cx: &mut ModelContext<Self>,
1015 ) {
1016 let buffer = buffer_handle.read(cx);
1017 let buffer_id = buffer.remote_id();
1018 if let Some(file) = File::from_dyn(buffer.file()) {
1019 if file.is_local() {
1020 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1021 let initial_snapshot = buffer.text_snapshot();
1022 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1023
1024 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1025 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1026 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1027 .log_err();
1028 }
1029 }
1030
1031 if let Some(server) = language_server {
1032 server
1033 .notify::<lsp::notification::DidOpenTextDocument>(
1034 lsp::DidOpenTextDocumentParams {
1035 text_document: lsp::TextDocumentItem::new(
1036 uri,
1037 Default::default(),
1038 0,
1039 initial_snapshot.text(),
1040 ),
1041 },
1043 )
1044 .log_err();
1045 buffer_handle.update(cx, |buffer, cx| {
1046 buffer.set_completion_triggers(
1047 server
1048 .capabilities()
1049 .completion_provider
1050 .as_ref()
1051 .and_then(|provider| provider.trigger_characters.clone())
1052 .unwrap_or_default(),
1053 cx,
1054 )
1055 });
1056 self.buffer_snapshots
1057 .insert(buffer_id, vec![(0, initial_snapshot)]);
1058 }
1059
1060 cx.observe_release(buffer_handle, |this, buffer, cx| {
1061 if let Some(file) = File::from_dyn(buffer.file()) {
1062 if file.is_local() {
1063 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1064 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1065 server
1066 .notify::<lsp::notification::DidCloseTextDocument>(
1067 lsp::DidCloseTextDocumentParams {
1068 text_document: lsp::TextDocumentIdentifier::new(
1069 uri.clone(),
1070 ),
1071 },
1072 )
1073 .log_err();
1074 }
1075 }
1076 }
1077 })
1078 .detach();
1079 }
1080 }
1081 }
1082
1083 fn on_buffer_event(
1084 &mut self,
1085 buffer: ModelHandle<Buffer>,
1086 event: &BufferEvent,
1087 cx: &mut ModelContext<Self>,
1088 ) -> Option<()> {
1089 match event {
1090 BufferEvent::Operation(operation) => {
1091 let project_id = self.remote_id()?;
1092 let request = self.client.request(proto::UpdateBuffer {
1093 project_id,
1094 buffer_id: buffer.read(cx).remote_id(),
1095 operations: vec![language::proto::serialize_operation(&operation)],
1096 });
1097 cx.background().spawn(request).detach_and_log_err(cx);
1098 }
1099 BufferEvent::Edited => {
1100 let language_server = self
1101 .language_server_for_buffer(buffer.read(cx), cx)?
1102 .clone();
1103 let buffer = buffer.read(cx);
1104 let file = File::from_dyn(buffer.file())?;
1105 let abs_path = file.as_local()?.abs_path(cx);
1106 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1107 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1108 let (version, prev_snapshot) = buffer_snapshots.last()?;
1109 let next_snapshot = buffer.text_snapshot();
1110 let next_version = version + 1;
1111
1112 let content_changes = buffer
1113 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1114 .map(|edit| {
1115 let edit_start = edit.new.start.0;
1116 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1117 let new_text = next_snapshot
1118 .text_for_range(edit.new.start.1..edit.new.end.1)
1119 .collect();
1120 lsp::TextDocumentContentChangeEvent {
1121 range: Some(lsp::Range::new(
1122 edit_start.to_lsp_position(),
1123 edit_end.to_lsp_position(),
1124 )),
1125 range_length: None,
1126 text: new_text,
1127 }
1128 })
1129 .collect();
1130
1131 buffer_snapshots.push((next_version, next_snapshot));
1132
1133 language_server
1134 .notify::<lsp::notification::DidChangeTextDocument>(
1135 lsp::DidChangeTextDocumentParams {
1136 text_document: lsp::VersionedTextDocumentIdentifier::new(
1137 uri,
1138 next_version,
1139 ),
1140 content_changes,
1141 },
1142 )
1143 .log_err();
1144 }
1145 BufferEvent::Saved => {
1146 let file = File::from_dyn(buffer.read(cx).file())?;
1147 let worktree_id = file.worktree_id(cx);
1148 let abs_path = file.as_local()?.abs_path(cx);
1149 let text_document = lsp::TextDocumentIdentifier {
1150 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1151 };
1152
1153 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1154 server
1155 .notify::<lsp::notification::DidSaveTextDocument>(
1156 lsp::DidSaveTextDocumentParams {
1157 text_document: text_document.clone(),
1158 text: None,
1159 },
1160 )
1161 .log_err();
1162 }
1163 }
1164 _ => {}
1165 }
1166
1167 None
1168 }
1169
1170 fn language_servers_for_worktree(
1171 &self,
1172 worktree_id: WorktreeId,
1173 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1174 self.language_servers.iter().filter_map(
1175 move |((language_server_worktree_id, language_name), server)| {
1176 if *language_server_worktree_id == worktree_id {
1177 Some((language_name.as_ref(), server))
1178 } else {
1179 None
1180 }
1181 },
1182 )
1183 }
1184
1185 fn assign_language_to_buffer(
1186 &mut self,
1187 buffer: &ModelHandle<Buffer>,
1188 cx: &mut ModelContext<Self>,
1189 ) -> Option<()> {
1190 // If the buffer has a language, set it and start the language server if we haven't already.
1191 let full_path = buffer.read(cx).file()?.full_path(cx);
1192 let language = self.languages.select_language(&full_path)?;
1193 buffer.update(cx, |buffer, cx| {
1194 buffer.set_language(Some(language.clone()), cx);
1195 });
1196
1197 let file = File::from_dyn(buffer.read(cx).file())?;
1198 let worktree = file.worktree.read(cx).as_local()?;
1199 let worktree_id = worktree.id();
1200 let worktree_abs_path = worktree.abs_path().clone();
1201 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1202
1203 None
1204 }
1205
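    /// Starts a language server for the given worktree and language, at most
    /// once per `(worktree, language)` pair. Server notifications are
    /// forwarded as `LanguageServerEvent`s, and once the server is initialized
    /// every matching open buffer is announced to it via
    /// `textDocument/didOpen`.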
1206 fn start_language_server(
1207 &mut self,
1208 worktree_id: WorktreeId,
1209 worktree_path: Arc<Path>,
1210 language: Arc<Language>,
1211 cx: &mut ModelContext<Self>,
1212 ) {
1213 let key = (worktree_id, language.name());
1214 self.started_language_servers
1215 .entry(key.clone())
1216 .or_insert_with(|| {
1217 let server_id = post_inc(&mut self.next_language_server_id);
1218 let language_server = self.languages.start_language_server(
1219 language.clone(),
1220 worktree_path,
1221 self.client.http_client(),
1222 cx,
1223 );
1224 cx.spawn_weak(|this, mut cx| async move {
1225 let mut language_server = language_server?.await.log_err()?;
1226 let this = this.upgrade(&cx)?;
1227 let (language_server_events_tx, language_server_events_rx) =
1228 smol::channel::unbounded();
1229
1230 language_server
1231 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1232 let language_server_events_tx = language_server_events_tx.clone();
1233 move |params| {
1234 language_server_events_tx
1235 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1236 .ok();
1237 }
1238 })
1239 .detach();
1240
1241 language_server
1242 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1243 let settings = this
1244 .read_with(&cx, |this, _| this.language_server_settings.clone());
1245 move |params| {
1246 let settings = settings.lock();
1247 Ok(params
1248 .items
1249 .into_iter()
1250 .map(|item| {
1251 if let Some(section) = &item.section {
1252 settings
1253 .get(section)
1254 .cloned()
1255 .unwrap_or(serde_json::Value::Null)
1256 } else {
1257 settings.clone()
1258 }
1259 })
1260 .collect())
1261 }
1262 })
1263 .detach();
1264
1265 language_server
1266 .on_notification::<lsp::notification::Progress, _>(move |params| {
1267 let token = match params.token {
1268 lsp::NumberOrString::String(token) => token,
1269 lsp::NumberOrString::Number(token) => {
1270 log::info!("skipping numeric progress token {}", token);
1271 return;
1272 }
1273 };
1274
1275 match params.value {
1276 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1277 lsp::WorkDoneProgress::Begin(_) => {
1278 language_server_events_tx
1279 .try_send(LanguageServerEvent::WorkStart { token })
1280 .ok();
1281 }
1282 lsp::WorkDoneProgress::Report(report) => {
1283 language_server_events_tx
1284 .try_send(LanguageServerEvent::WorkProgress {
1285 token,
1286 progress: LanguageServerProgress {
1287 message: report.message,
1288 percentage: report
1289 .percentage
1290 .map(|p| p as usize),
1291 last_update_at: Instant::now(),
1292 },
1293 })
1294 .ok();
1295 }
1296 lsp::WorkDoneProgress::End(_) => {
1297 language_server_events_tx
1298 .try_send(LanguageServerEvent::WorkEnd { token })
1299 .ok();
1300 }
1301 },
1302 }
1303 })
1304 .detach();
1305
1306 // Process all the LSP events.
1307 cx.spawn(|mut cx| {
1308 let this = this.downgrade();
1309 async move {
1310 while let Ok(event) = language_server_events_rx.recv().await {
1311 let this = this.upgrade(&cx)?;
1312 this.update(&mut cx, |this, cx| {
1313 this.on_lsp_event(server_id, event, &language, cx)
1314 });
1315
1316 // Don't starve the main thread when lots of events arrive all at once.
1317 smol::future::yield_now().await;
1318 }
1319 Some(())
1320 }
1321 })
1322 .detach();
1323
1324 let language_server = language_server.initialize().await.log_err()?;
1325 this.update(&mut cx, |this, cx| {
1326 this.language_servers
1327 .insert(key.clone(), language_server.clone());
1328 this.language_server_statuses.insert(
1329 server_id,
1330 LanguageServerStatus {
1331 name: language_server.name().to_string(),
1332 pending_work: Default::default(),
1333 pending_diagnostic_updates: 0,
1334 },
1335 );
1336 language_server
1337 .notify::<lsp::notification::DidChangeConfiguration>(
1338 lsp::DidChangeConfigurationParams {
1339 settings: this.language_server_settings.lock().clone(),
1340 },
1341 )
1342 .ok();
1343
1344 if let Some(project_id) = this.remote_id() {
1345 this.client
1346 .send(proto::StartLanguageServer {
1347 project_id,
1348 server: Some(proto::LanguageServer {
1349 id: server_id as u64,
1350 name: language_server.name().to_string(),
1351 }),
1352 })
1353 .log_err();
1354 }
1355
1356 // Tell the language server about every open buffer in the worktree that matches the language.
1357 for buffer in this.opened_buffers.values() {
1358 if let Some(buffer_handle) = buffer.upgrade(cx) {
1359 let buffer = buffer_handle.read(cx);
1360 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1361 file
1362 } else {
1363 continue;
1364 };
1365 let language = if let Some(language) = buffer.language() {
1366 language
1367 } else {
1368 continue;
1369 };
1370 if (file.worktree.read(cx).id(), language.name()) != key {
1371 continue;
1372 }
1373
1374 let file = file.as_local()?;
1375 let versions = this
1376 .buffer_snapshots
1377 .entry(buffer.remote_id())
1378 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1379 let (version, initial_snapshot) = versions.last().unwrap();
1380 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1381 language_server
1382 .notify::<lsp::notification::DidOpenTextDocument>(
1383 lsp::DidOpenTextDocumentParams {
1384 text_document: lsp::TextDocumentItem::new(
1385 uri,
1386 Default::default(),
1387 *version,
1388 initial_snapshot.text(),
1389 ),
1390 },
1391 )
1392 .log_err()?;
1393 buffer_handle.update(cx, |buffer, cx| {
1394 buffer.set_completion_triggers(
1395 language_server
1396 .capabilities()
1397 .completion_provider
1398 .as_ref()
1399 .and_then(|provider| {
1400 provider.trigger_characters.clone()
1401 })
1402 .unwrap_or_default(),
1403 cx,
1404 )
1405 });
1406 }
1407 }
1408
1409 cx.notify();
1410 Some(())
1411 });
1412
1413 Some(language_server)
1414 })
1415 });
1416 }
1417
1418 fn on_lsp_event(
1419 &mut self,
1420 language_server_id: usize,
1421 event: LanguageServerEvent,
1422 language: &Arc<Language>,
1423 cx: &mut ModelContext<Self>,
1424 ) {
1425 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1426 let language_server_status =
1427 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1428 status
1429 } else {
1430 return;
1431 };
1432
1433 match event {
1434 LanguageServerEvent::WorkStart { token } => {
1435 if Some(&token) == disk_diagnostics_token {
1436 language_server_status.pending_diagnostic_updates += 1;
1437 if language_server_status.pending_diagnostic_updates == 1 {
1438 self.disk_based_diagnostics_started(cx);
1439 self.broadcast_language_server_update(
1440 language_server_id,
1441 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1442 proto::LspDiskBasedDiagnosticsUpdating {},
1443 ),
1444 );
1445 }
1446 } else {
1447 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1448 self.broadcast_language_server_update(
1449 language_server_id,
1450 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1451 token,
1452 }),
1453 );
1454 }
1455 }
1456 LanguageServerEvent::WorkProgress { token, progress } => {
1457 if Some(&token) != disk_diagnostics_token {
1458 self.on_lsp_work_progress(
1459 language_server_id,
1460 token.clone(),
1461 progress.clone(),
1462 cx,
1463 );
1464 self.broadcast_language_server_update(
1465 language_server_id,
1466 proto::update_language_server::Variant::WorkProgress(
1467 proto::LspWorkProgress {
1468 token,
1469 message: progress.message,
1470 percentage: progress.percentage.map(|p| p as u32),
1471 },
1472 ),
1473 );
1474 }
1475 }
1476 LanguageServerEvent::WorkEnd { token } => {
1477 if Some(&token) == disk_diagnostics_token {
1478 language_server_status.pending_diagnostic_updates -= 1;
1479 if language_server_status.pending_diagnostic_updates == 0 {
1480 self.disk_based_diagnostics_finished(cx);
1481 self.broadcast_language_server_update(
1482 language_server_id,
1483 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1484 proto::LspDiskBasedDiagnosticsUpdated {},
1485 ),
1486 );
1487 }
1488 } else {
1489 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1490 self.broadcast_language_server_update(
1491 language_server_id,
1492 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1493 token,
1494 }),
1495 );
1496 }
1497 }
1498 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1499 language.process_diagnostics(&mut params);
1500
1501 if disk_diagnostics_token.is_none() {
1502 self.disk_based_diagnostics_started(cx);
1503 self.broadcast_language_server_update(
1504 language_server_id,
1505 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1506 proto::LspDiskBasedDiagnosticsUpdating {},
1507 ),
1508 );
1509 }
1510 self.update_diagnostics(
1511 params,
1512 language
1513 .disk_based_diagnostic_sources()
1514 .unwrap_or(&Default::default()),
1515 cx,
1516 )
1517 .log_err();
1518 if disk_diagnostics_token.is_none() {
1519 self.disk_based_diagnostics_finished(cx);
1520 self.broadcast_language_server_update(
1521 language_server_id,
1522 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1523 proto::LspDiskBasedDiagnosticsUpdated {},
1524 ),
1525 );
1526 }
1527 }
1528 }
1529 }
1530
1531 fn on_lsp_work_start(
1532 &mut self,
1533 language_server_id: usize,
1534 token: String,
1535 cx: &mut ModelContext<Self>,
1536 ) {
1537 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1538 status.pending_work.insert(
1539 token,
1540 LanguageServerProgress {
1541 message: None,
1542 percentage: None,
1543 last_update_at: Instant::now(),
1544 },
1545 );
1546 cx.notify();
1547 }
1548 }
1549
1550 fn on_lsp_work_progress(
1551 &mut self,
1552 language_server_id: usize,
1553 token: String,
1554 progress: LanguageServerProgress,
1555 cx: &mut ModelContext<Self>,
1556 ) {
1557 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1558 status.pending_work.insert(token, progress);
1559 cx.notify();
1560 }
1561 }
1562
1563 fn on_lsp_work_end(
1564 &mut self,
1565 language_server_id: usize,
1566 token: String,
1567 cx: &mut ModelContext<Self>,
1568 ) {
1569 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1570 status.pending_work.remove(&token);
1571 cx.notify();
1572 }
1573 }
1574
1575 fn broadcast_language_server_update(
1576 &self,
1577 language_server_id: usize,
1578 event: proto::update_language_server::Variant,
1579 ) {
1580 if let Some(project_id) = self.remote_id() {
1581 self.client
1582 .send(proto::UpdateLanguageServer {
1583 project_id,
1584 language_server_id: language_server_id as u64,
1585 variant: Some(event),
1586 })
1587 .log_err();
1588 }
1589 }
1590
1591 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1592 for server in self.language_servers.values() {
1593 server
1594 .notify::<lsp::notification::DidChangeConfiguration>(
1595 lsp::DidChangeConfigurationParams {
1596 settings: settings.clone(),
1597 },
1598 )
1599 .ok();
1600 }
1601 *self.language_server_settings.lock() = settings;
1602 }
1603
1604 pub fn language_server_statuses(
1605 &self,
1606 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1607 self.language_server_statuses.values()
1608 }
1609
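    /// Converts an LSP `publishDiagnostics` notification into the project's
    /// diagnostic entries, grouping related information with its primary
    /// diagnostic and flagging disk-based sources, then stores the result on
    /// the corresponding worktree and buffer.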
1610 pub fn update_diagnostics(
1611 &mut self,
1612 params: lsp::PublishDiagnosticsParams,
1613 disk_based_sources: &HashSet<String>,
1614 cx: &mut ModelContext<Self>,
1615 ) -> Result<()> {
1616 let abs_path = params
1617 .uri
1618 .to_file_path()
1619 .map_err(|_| anyhow!("URI is not a file"))?;
1620 let mut next_group_id = 0;
1621 let mut diagnostics = Vec::default();
1622 let mut primary_diagnostic_group_ids = HashMap::default();
1623 let mut sources_by_group_id = HashMap::default();
1624 let mut supporting_diagnostic_severities = HashMap::default();
1625 for diagnostic in &params.diagnostics {
1626 let source = diagnostic.source.as_ref();
1627 let code = diagnostic.code.as_ref().map(|code| match code {
1628 lsp::NumberOrString::Number(code) => code.to_string(),
1629 lsp::NumberOrString::String(code) => code.clone(),
1630 });
1631 let range = range_from_lsp(diagnostic.range);
1632 let is_supporting = diagnostic
1633 .related_information
1634 .as_ref()
1635 .map_or(false, |infos| {
1636 infos.iter().any(|info| {
1637 primary_diagnostic_group_ids.contains_key(&(
1638 source,
1639 code.clone(),
1640 range_from_lsp(info.location.range),
1641 ))
1642 })
1643 });
1644
1645 if is_supporting {
1646 if let Some(severity) = diagnostic.severity {
1647 supporting_diagnostic_severities
1648 .insert((source, code.clone(), range), severity);
1649 }
1650 } else {
1651 let group_id = post_inc(&mut next_group_id);
1652 let is_disk_based =
1653 source.map_or(false, |source| disk_based_sources.contains(source));
1654
1655 sources_by_group_id.insert(group_id, source);
1656 primary_diagnostic_group_ids
1657 .insert((source, code.clone(), range.clone()), group_id);
1658
1659 diagnostics.push(DiagnosticEntry {
1660 range,
1661 diagnostic: Diagnostic {
1662 code: code.clone(),
1663 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1664 message: diagnostic.message.clone(),
1665 group_id,
1666 is_primary: true,
1667 is_valid: true,
1668 is_disk_based,
1669 },
1670 });
1671 if let Some(infos) = &diagnostic.related_information {
1672 for info in infos {
1673 if info.location.uri == params.uri && !info.message.is_empty() {
1674 let range = range_from_lsp(info.location.range);
1675 diagnostics.push(DiagnosticEntry {
1676 range,
1677 diagnostic: Diagnostic {
1678 code: code.clone(),
1679 severity: DiagnosticSeverity::INFORMATION,
1680 message: info.message.clone(),
1681 group_id,
1682 is_primary: false,
1683 is_valid: true,
1684 is_disk_based,
1685 },
1686 });
1687 }
1688 }
1689 }
1690 }
1691 }
1692
1693 for entry in &mut diagnostics {
1694 let diagnostic = &mut entry.diagnostic;
1695 if !diagnostic.is_primary {
1696 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1697 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1698 source,
1699 diagnostic.code.clone(),
1700 entry.range.clone(),
1701 )) {
1702 diagnostic.severity = severity;
1703 }
1704 }
1705 }
1706
1707 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1708 Ok(())
1709 }
1710
1711 pub fn update_diagnostic_entries(
1712 &mut self,
1713 abs_path: PathBuf,
1714 version: Option<i32>,
1715 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1716 cx: &mut ModelContext<Project>,
1717 ) -> Result<(), anyhow::Error> {
1718 let (worktree, relative_path) = self
1719 .find_local_worktree(&abs_path, cx)
1720 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1721 if !worktree.read(cx).is_visible() {
1722 return Ok(());
1723 }
1724
1725 let project_path = ProjectPath {
1726 worktree_id: worktree.read(cx).id(),
1727 path: relative_path.into(),
1728 };
1729
1730 for buffer in self.opened_buffers.values() {
1731 if let Some(buffer) = buffer.upgrade(cx) {
1732 if buffer
1733 .read(cx)
1734 .file()
1735 .map_or(false, |file| *file.path() == project_path.path)
1736 {
1737 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1738 break;
1739 }
1740 }
1741 }
1742 worktree.update(cx, |worktree, cx| {
1743 worktree
1744 .as_local_mut()
1745 .ok_or_else(|| anyhow!("not a local worktree"))?
1746 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1747 })?;
1748 cx.emit(Event::DiagnosticsUpdated(project_path));
1749 Ok(())
1750 }
1751
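    /// Applies diagnostics to a buffer, translating the ranges of disk-based
    /// diagnostics through any edits made since the buffer was last saved and
    /// expanding empty ranges so they remain visible.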
1752 fn update_buffer_diagnostics(
1753 &mut self,
1754 buffer: &ModelHandle<Buffer>,
1755 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1756 version: Option<i32>,
1757 cx: &mut ModelContext<Self>,
1758 ) -> Result<()> {
1759 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1760 Ordering::Equal
1761 .then_with(|| b.is_primary.cmp(&a.is_primary))
1762 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1763 .then_with(|| a.severity.cmp(&b.severity))
1764 .then_with(|| a.message.cmp(&b.message))
1765 }
1766
1767 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1768
1769 diagnostics.sort_unstable_by(|a, b| {
1770 Ordering::Equal
1771 .then_with(|| a.range.start.cmp(&b.range.start))
1772 .then_with(|| b.range.end.cmp(&a.range.end))
1773 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1774 });
1775
1776 let mut sanitized_diagnostics = Vec::new();
1777 let mut edits_since_save = snapshot
1778 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1779 .peekable();
1780 let mut last_edit_old_end = PointUtf16::zero();
1781 let mut last_edit_new_end = PointUtf16::zero();
1782 'outer: for entry in diagnostics {
1783 let mut start = entry.range.start;
1784 let mut end = entry.range.end;
1785
1786 // Some diagnostics are based on files on disk instead of buffers'
1787 // current contents. Adjust these diagnostics' ranges to reflect
1788 // any unsaved edits.
1789 if entry.diagnostic.is_disk_based {
1790 while let Some(edit) = edits_since_save.peek() {
1791 if edit.old.end <= start {
1792 last_edit_old_end = edit.old.end;
1793 last_edit_new_end = edit.new.end;
1794 edits_since_save.next();
1795 } else if edit.old.start <= end && edit.old.end >= start {
1796 continue 'outer;
1797 } else {
1798 break;
1799 }
1800 }
1801
1802 let start_overshoot = start - last_edit_old_end;
1803 start = last_edit_new_end;
1804 start += start_overshoot;
1805
1806 let end_overshoot = end - last_edit_old_end;
1807 end = last_edit_new_end;
1808 end += end_overshoot;
1809 }
1810
1811 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1812 ..snapshot.clip_point_utf16(end, Bias::Right);
1813
1814 // Expand empty ranges by one character
1815 if range.start == range.end {
1816 range.end.column += 1;
1817 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1818 if range.start == range.end && range.end.column > 0 {
1819 range.start.column -= 1;
1820 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1821 }
1822 }
1823
1824 sanitized_diagnostics.push(DiagnosticEntry {
1825 range,
1826 diagnostic: entry.diagnostic,
1827 });
1828 }
1829 drop(edits_since_save);
1830
1831 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1832 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1833 Ok(())
1834 }
1835
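    /// Formats the given buffers, returning the resulting transactions. Remote
    /// buffers are formatted by the host via a `FormatBuffers` request; local
    /// buffers use their language server's document or range formatting,
    /// whichever the server advertises.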
1836 pub fn format(
1837 &self,
1838 buffers: HashSet<ModelHandle<Buffer>>,
1839 push_to_history: bool,
1840 cx: &mut ModelContext<Project>,
1841 ) -> Task<Result<ProjectTransaction>> {
1842 let mut local_buffers = Vec::new();
1843 let mut remote_buffers = None;
1844 for buffer_handle in buffers {
1845 let buffer = buffer_handle.read(cx);
1846 if let Some(file) = File::from_dyn(buffer.file()) {
1847 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1848 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1849 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1850 }
1851 } else {
1852 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1853 }
1854 } else {
1855 return Task::ready(Ok(Default::default()));
1856 }
1857 }
1858
1859 let remote_buffers = self.remote_id().zip(remote_buffers);
1860 let client = self.client.clone();
1861
1862 cx.spawn(|this, mut cx| async move {
1863 let mut project_transaction = ProjectTransaction::default();
1864
1865 if let Some((project_id, remote_buffers)) = remote_buffers {
1866 let response = client
1867 .request(proto::FormatBuffers {
1868 project_id,
1869 buffer_ids: remote_buffers
1870 .iter()
1871 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1872 .collect(),
1873 })
1874 .await?
1875 .transaction
1876 .ok_or_else(|| anyhow!("missing transaction"))?;
1877 project_transaction = this
1878 .update(&mut cx, |this, cx| {
1879 this.deserialize_project_transaction(response, push_to_history, cx)
1880 })
1881 .await?;
1882 }
1883
1884 for (buffer, buffer_abs_path, language_server) in local_buffers {
1885 let text_document = lsp::TextDocumentIdentifier::new(
1886 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1887 );
1888 let capabilities = &language_server.capabilities();
1889 let lsp_edits = if capabilities
1890 .document_formatting_provider
1891 .as_ref()
1892 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1893 {
1894 language_server
1895 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1896 text_document,
1897 options: Default::default(),
1898 work_done_progress_params: Default::default(),
1899 })
1900 .await?
1901 } else if capabilities
1902 .document_range_formatting_provider
1903 .as_ref()
1904 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1905 {
1906 let buffer_start = lsp::Position::new(0, 0);
1907 let buffer_end = buffer
1908 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1909 .to_lsp_position();
1910 language_server
1911 .request::<lsp::request::RangeFormatting>(
1912 lsp::DocumentRangeFormattingParams {
1913 text_document,
1914 range: lsp::Range::new(buffer_start, buffer_end),
1915 options: Default::default(),
1916 work_done_progress_params: Default::default(),
1917 },
1918 )
1919 .await?
1920 } else {
1921 continue;
1922 };
1923
1924 if let Some(lsp_edits) = lsp_edits {
1925 let edits = this
1926 .update(&mut cx, |this, cx| {
1927 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1928 })
1929 .await?;
1930 buffer.update(&mut cx, |buffer, cx| {
1931 buffer.finalize_last_transaction();
1932 buffer.start_transaction();
1933 for (range, text) in edits {
1934 buffer.edit([range], text, cx);
1935 }
1936 if buffer.end_transaction(cx).is_some() {
1937 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1938 if !push_to_history {
1939 buffer.forget_transaction(transaction.id);
1940 }
1941 project_transaction.0.insert(cx.handle(), transaction);
1942 }
1943 });
1944 }
1945 }
1946
1947 Ok(project_transaction)
1948 })
1949 }
1950
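    /// Returns the locations where the symbol at `position` is defined, using the
    /// buffer's language server locally or the host's when the project is remote.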
1951 pub fn definition<T: ToPointUtf16>(
1952 &self,
1953 buffer: &ModelHandle<Buffer>,
1954 position: T,
1955 cx: &mut ModelContext<Self>,
1956 ) -> Task<Result<Vec<Location>>> {
1957 let position = position.to_point_utf16(buffer.read(cx));
1958 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1959 }
1960
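    /// Returns all locations that reference the symbol at `position`.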
1961 pub fn references<T: ToPointUtf16>(
1962 &self,
1963 buffer: &ModelHandle<Buffer>,
1964 position: T,
1965 cx: &mut ModelContext<Self>,
1966 ) -> Task<Result<Vec<Location>>> {
1967 let position = position.to_point_utf16(buffer.read(cx));
1968 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1969 }
1970
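    /// Returns the document highlights (other occurrences of the symbol under
    /// `position`, tagged as read, write, or plain text) within the buffer.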
1971 pub fn document_highlights<T: ToPointUtf16>(
1972 &self,
1973 buffer: &ModelHandle<Buffer>,
1974 position: T,
1975 cx: &mut ModelContext<Self>,
1976 ) -> Task<Result<Vec<DocumentHighlight>>> {
1977 let position = position.to_point_utf16(buffer.read(cx));
1978
1979 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1980 }
1981
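    /// Searches the entire project for symbols matching `query`.
    ///
    /// Locally, every running language server receives a `workspace/symbol`
    /// request and each result is resolved to a worktree-relative path, either in
    /// the worktree that contains it or relative to the originating server's
    /// worktree root. Each symbol also carries a signature so the host can later
    /// verify `open_buffer_for_symbol` requests. On remote projects the query is
    /// forwarded to the host via `GetProjectSymbols`.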
1982 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1983 if self.is_local() {
1984 let mut language_servers = HashMap::default();
1985 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1986 if let Some((worktree, language)) = self
1987 .worktree_for_id(*worktree_id, cx)
1988 .and_then(|worktree| worktree.read(cx).as_local())
1989 .zip(self.languages.get_language(language_name))
1990 {
1991 language_servers
1992 .entry(Arc::as_ptr(language_server))
1993 .or_insert((
1994 language_server.clone(),
1995 *worktree_id,
1996 worktree.abs_path().clone(),
1997 language.clone(),
1998 ));
1999 }
2000 }
2001
2002 let mut requests = Vec::new();
2003 for (language_server, _, _, _) in language_servers.values() {
2004 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2005 lsp::WorkspaceSymbolParams {
2006 query: query.to_string(),
2007 ..Default::default()
2008 },
2009 ));
2010 }
2011
2012 cx.spawn_weak(|this, cx| async move {
2013 let responses = futures::future::try_join_all(requests).await?;
2014
2015 let mut symbols = Vec::new();
2016 if let Some(this) = this.upgrade(&cx) {
2017 this.read_with(&cx, |this, cx| {
2018 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2019 language_servers.into_values().zip(responses)
2020 {
2021 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2022 |lsp_symbol| {
2023 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2024 let mut worktree_id = source_worktree_id;
2025 let path;
2026 if let Some((worktree, rel_path)) =
2027 this.find_local_worktree(&abs_path, cx)
2028 {
2029 worktree_id = worktree.read(cx).id();
2030 path = rel_path;
2031 } else {
2032 path = relativize_path(&worktree_abs_path, &abs_path);
2033 }
2034
2035 let label = language
2036 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2037 .unwrap_or_else(|| {
2038 CodeLabel::plain(lsp_symbol.name.clone(), None)
2039 });
2040 let signature = this.symbol_signature(worktree_id, &path);
2041
2042 Some(Symbol {
2043 source_worktree_id,
2044 worktree_id,
2045 language_name: language.name().to_string(),
2046 name: lsp_symbol.name,
2047 kind: lsp_symbol.kind,
2048 label,
2049 path,
2050 range: range_from_lsp(lsp_symbol.location.range),
2051 signature,
2052 })
2053 },
2054 ));
2055 }
2056 })
2057 }
2058
2059 Ok(symbols)
2060 })
2061 } else if let Some(project_id) = self.remote_id() {
2062 let request = self.client.request(proto::GetProjectSymbols {
2063 project_id,
2064 query: query.to_string(),
2065 });
2066 cx.spawn_weak(|this, cx| async move {
2067 let response = request.await?;
2068 let mut symbols = Vec::new();
2069 if let Some(this) = this.upgrade(&cx) {
2070 this.read_with(&cx, |this, _| {
2071 symbols.extend(
2072 response
2073 .symbols
2074 .into_iter()
2075 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2076 );
2077 })
2078 }
2079 Ok(symbols)
2080 })
2081 } else {
2082 Task::ready(Ok(Default::default()))
2083 }
2084 }
2085
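    /// Opens the buffer that contains the given symbol, either by asking the
    /// symbol's language server for it locally or by forwarding the request to
    /// the host when the project is remote.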
2086 pub fn open_buffer_for_symbol(
2087 &mut self,
2088 symbol: &Symbol,
2089 cx: &mut ModelContext<Self>,
2090 ) -> Task<Result<ModelHandle<Buffer>>> {
2091 if self.is_local() {
2092 let language_server = if let Some(server) = self.language_servers.get(&(
2093 symbol.source_worktree_id,
2094 Arc::from(symbol.language_name.as_str()),
2095 )) {
2096 server.clone()
2097 } else {
2098 return Task::ready(Err(anyhow!(
2099 "language server for worktree and language not found"
2100 )));
2101 };
2102
2103 let worktree_abs_path = if let Some(worktree_abs_path) = self
2104 .worktree_for_id(symbol.worktree_id, cx)
2105 .and_then(|worktree| worktree.read(cx).as_local())
2106 .map(|local_worktree| local_worktree.abs_path())
2107 {
2108 worktree_abs_path
2109 } else {
2110 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2111 };
2112 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2113 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2114 uri
2115 } else {
2116 return Task::ready(Err(anyhow!("invalid symbol path")));
2117 };
2118
2119 self.open_local_buffer_via_lsp(
2120 symbol_uri,
2121 Arc::from(symbol.language_name.as_str()),
2122 language_server,
2123 cx,
2124 )
2125 } else if let Some(project_id) = self.remote_id() {
2126 let request = self.client.request(proto::OpenBufferForSymbol {
2127 project_id,
2128 symbol: Some(serialize_symbol(symbol)),
2129 });
2130 cx.spawn(|this, mut cx| async move {
2131 let response = request.await?;
2132 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2133 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2134 .await
2135 })
2136 } else {
2137 Task::ready(Err(anyhow!("project does not have a remote id")))
2138 }
2139 }
2140
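    /// Requests completions at `position` in the given buffer.
    ///
    /// Locally this issues a `textDocument/completion` request and converts each
    /// completion's text edit into buffer anchors, skipping insert-and-replace
    /// edits and any edit whose range doesn't clip cleanly to the current buffer
    /// contents. On remote projects the request is forwarded to the host, waiting
    /// for the host's buffer version before deserializing the results.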
2141 pub fn completions<T: ToPointUtf16>(
2142 &self,
2143 source_buffer_handle: &ModelHandle<Buffer>,
2144 position: T,
2145 cx: &mut ModelContext<Self>,
2146 ) -> Task<Result<Vec<Completion>>> {
2147 let source_buffer_handle = source_buffer_handle.clone();
2148 let source_buffer = source_buffer_handle.read(cx);
2149 let buffer_id = source_buffer.remote_id();
2150 let language = source_buffer.language().cloned();
2151 let worktree;
2152 let buffer_abs_path;
2153 if let Some(file) = File::from_dyn(source_buffer.file()) {
2154 worktree = file.worktree.clone();
2155 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2156 } else {
2157 return Task::ready(Ok(Default::default()));
2158 };
2159
2160 let position = position.to_point_utf16(source_buffer);
2161 let anchor = source_buffer.anchor_after(position);
2162
2163 if worktree.read(cx).as_local().is_some() {
2164 let buffer_abs_path = buffer_abs_path.unwrap();
2165 let lang_server =
2166 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2167 server.clone()
2168 } else {
2169 return Task::ready(Ok(Default::default()));
2170 };
2171
2172 cx.spawn(|_, cx| async move {
2173 let completions = lang_server
2174 .request::<lsp::request::Completion>(lsp::CompletionParams {
2175 text_document_position: lsp::TextDocumentPositionParams::new(
2176 lsp::TextDocumentIdentifier::new(
2177 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2178 ),
2179 position.to_lsp_position(),
2180 ),
2181 context: Default::default(),
2182 work_done_progress_params: Default::default(),
2183 partial_result_params: Default::default(),
2184 })
2185 .await
2186 .context("lsp completion request failed")?;
2187
2188 let completions = if let Some(completions) = completions {
2189 match completions {
2190 lsp::CompletionResponse::Array(completions) => completions,
2191 lsp::CompletionResponse::List(list) => list.items,
2192 }
2193 } else {
2194 Default::default()
2195 };
2196
2197 source_buffer_handle.read_with(&cx, |this, _| {
2198 Ok(completions
2199 .into_iter()
2200 .filter_map(|lsp_completion| {
2201 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2202 lsp::CompletionTextEdit::Edit(edit) => {
2203 (range_from_lsp(edit.range), edit.new_text.clone())
2204 }
2205 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2206 log::info!("unsupported insert/replace completion");
2207 return None;
2208 }
2209 };
2210
2211 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2212 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2213 if clipped_start == old_range.start && clipped_end == old_range.end {
2214 Some(Completion {
2215 old_range: this.anchor_before(old_range.start)
2216 ..this.anchor_after(old_range.end),
2217 new_text,
2218 label: language
2219 .as_ref()
2220 .and_then(|l| l.label_for_completion(&lsp_completion))
2221 .unwrap_or_else(|| {
2222 CodeLabel::plain(
2223 lsp_completion.label.clone(),
2224 lsp_completion.filter_text.as_deref(),
2225 )
2226 }),
2227 lsp_completion,
2228 })
2229 } else {
2230 None
2231 }
2232 })
2233 .collect())
2234 })
2235 })
2236 } else if let Some(project_id) = self.remote_id() {
2237 let rpc = self.client.clone();
2238 let message = proto::GetCompletions {
2239 project_id,
2240 buffer_id,
2241 position: Some(language::proto::serialize_anchor(&anchor)),
2242 version: serialize_version(&source_buffer.version()),
2243 };
2244 cx.spawn_weak(|_, mut cx| async move {
2245 let response = rpc.request(message).await?;
2246
2247 source_buffer_handle
2248 .update(&mut cx, |buffer, _| {
2249 buffer.wait_for_version(deserialize_version(response.version))
2250 })
2251 .await;
2252
2253 response
2254 .completions
2255 .into_iter()
2256 .map(|completion| {
2257 language::proto::deserialize_completion(completion, language.as_ref())
2258 })
2259 .collect()
2260 })
2261 } else {
2262 Task::ready(Ok(Default::default()))
2263 }
2264 }
2265
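    /// Resolves the given completion and applies its `additionalTextEdits` (for
    /// example, auto-inserted imports) to the buffer in a single transaction,
    /// optionally keeping that transaction out of the undo history. Remote
    /// projects delegate to the host via `ApplyCompletionAdditionalEdits`.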
2266 pub fn apply_additional_edits_for_completion(
2267 &self,
2268 buffer_handle: ModelHandle<Buffer>,
2269 completion: Completion,
2270 push_to_history: bool,
2271 cx: &mut ModelContext<Self>,
2272 ) -> Task<Result<Option<Transaction>>> {
2273 let buffer = buffer_handle.read(cx);
2274 let buffer_id = buffer.remote_id();
2275
2276 if self.is_local() {
2277 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2278 server.clone()
2279 } else {
2280 return Task::ready(Ok(Default::default()));
2281 };
2282
2283 cx.spawn(|this, mut cx| async move {
2284 let resolved_completion = lang_server
2285 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2286 .await?;
2287 if let Some(edits) = resolved_completion.additional_text_edits {
2288 let edits = this
2289 .update(&mut cx, |this, cx| {
2290 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2291 })
2292 .await?;
2293 buffer_handle.update(&mut cx, |buffer, cx| {
2294 buffer.finalize_last_transaction();
2295 buffer.start_transaction();
2296 for (range, text) in edits {
2297 buffer.edit([range], text, cx);
2298 }
2299 let transaction = if buffer.end_transaction(cx).is_some() {
2300 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2301 if !push_to_history {
2302 buffer.forget_transaction(transaction.id);
2303 }
2304 Some(transaction)
2305 } else {
2306 None
2307 };
2308 Ok(transaction)
2309 })
2310 } else {
2311 Ok(None)
2312 }
2313 })
2314 } else if let Some(project_id) = self.remote_id() {
2315 let client = self.client.clone();
2316 cx.spawn(|_, mut cx| async move {
2317 let response = client
2318 .request(proto::ApplyCompletionAdditionalEdits {
2319 project_id,
2320 buffer_id,
2321 completion: Some(language::proto::serialize_completion(&completion)),
2322 })
2323 .await?;
2324
2325 if let Some(transaction) = response.transaction {
2326 let transaction = language::proto::deserialize_transaction(transaction)?;
2327 buffer_handle
2328 .update(&mut cx, |buffer, _| {
2329 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2330 })
2331 .await;
2332 if push_to_history {
2333 buffer_handle.update(&mut cx, |buffer, _| {
2334 buffer.push_transaction(transaction.clone(), Instant::now());
2335 });
2336 }
2337 Ok(Some(transaction))
2338 } else {
2339 Ok(None)
2340 }
2341 })
2342 } else {
2343 Task::ready(Err(anyhow!("project does not have a remote id")))
2344 }
2345 }
2346
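    /// Requests the code actions available for the given range, restricted to
    /// quickfix and refactor kinds. Bare commands in the response are dropped;
    /// only full code actions are returned. Remote projects request them from the
    /// host via `GetCodeActions`.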
2347 pub fn code_actions<T: ToOffset>(
2348 &self,
2349 buffer_handle: &ModelHandle<Buffer>,
2350 range: Range<T>,
2351 cx: &mut ModelContext<Self>,
2352 ) -> Task<Result<Vec<CodeAction>>> {
2353 let buffer_handle = buffer_handle.clone();
2354 let buffer = buffer_handle.read(cx);
2355 let buffer_id = buffer.remote_id();
2356 let worktree;
2357 let buffer_abs_path;
2358 if let Some(file) = File::from_dyn(buffer.file()) {
2359 worktree = file.worktree.clone();
2360 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2361 } else {
2362 return Task::ready(Ok(Default::default()));
2363 };
2364 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2365
2366 if worktree.read(cx).as_local().is_some() {
2367 let buffer_abs_path = buffer_abs_path.unwrap();
2368 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2369 server.clone()
2370 } else {
2371 return Task::ready(Ok(Default::default()));
2372 };
2373
2374 let lsp_range = lsp::Range::new(
2375 range.start.to_point_utf16(buffer).to_lsp_position(),
2376 range.end.to_point_utf16(buffer).to_lsp_position(),
2377 );
2378 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2380 return Ok(Default::default());
2381 }
2382
2383 Ok(lang_server
2384 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2385 text_document: lsp::TextDocumentIdentifier::new(
2386 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2387 ),
2388 range: lsp_range,
2389 work_done_progress_params: Default::default(),
2390 partial_result_params: Default::default(),
2391 context: lsp::CodeActionContext {
2392 diagnostics: Default::default(),
2393 only: Some(vec![
2394 lsp::CodeActionKind::QUICKFIX,
2395 lsp::CodeActionKind::REFACTOR,
2396 lsp::CodeActionKind::REFACTOR_EXTRACT,
2397 ]),
2398 },
2399 })
2400 .await?
2401 .unwrap_or_default()
2402 .into_iter()
2403 .filter_map(|entry| {
2404 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2405 Some(CodeAction {
2406 range: range.clone(),
2407 lsp_action,
2408 })
2409 } else {
2410 None
2411 }
2412 })
2413 .collect())
2414 })
2415 } else if let Some(project_id) = self.remote_id() {
2416 let rpc = self.client.clone();
2417 let version = buffer.version();
2418 cx.spawn_weak(|_, mut cx| async move {
2419 let response = rpc
2420 .request(proto::GetCodeActions {
2421 project_id,
2422 buffer_id,
2423 start: Some(language::proto::serialize_anchor(&range.start)),
2424 end: Some(language::proto::serialize_anchor(&range.end)),
2425 version: serialize_version(&version),
2426 })
2427 .await?;
2428
2429 buffer_handle
2430 .update(&mut cx, |buffer, _| {
2431 buffer.wait_for_version(deserialize_version(response.version))
2432 })
2433 .await;
2434
2435 response
2436 .actions
2437 .into_iter()
2438 .map(language::proto::deserialize_code_action)
2439 .collect()
2440 })
2441 } else {
2442 Task::ready(Ok(Default::default()))
2443 }
2444 }
2445
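    /// Applies a previously returned code action.
    ///
    /// Locally, the action is first brought up to date: if it carries resolve
    /// `data`, its range is patched and the action is resolved with the language
    /// server; otherwise the actions for its range are re-requested and matched by
    /// title. The action's workspace edit is then applied via
    /// `deserialize_workspace_edit`. Remote projects send `ApplyCodeAction` to the
    /// host and deserialize the returned transaction.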
2446 pub fn apply_code_action(
2447 &self,
2448 buffer_handle: ModelHandle<Buffer>,
2449 mut action: CodeAction,
2450 push_to_history: bool,
2451 cx: &mut ModelContext<Self>,
2452 ) -> Task<Result<ProjectTransaction>> {
2453 if self.is_local() {
2454 let buffer = buffer_handle.read(cx);
2455 let lang_name = if let Some(lang) = buffer.language() {
2456 lang.name()
2457 } else {
2458 return Task::ready(Ok(Default::default()));
2459 };
2460 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2461 server.clone()
2462 } else {
2463 return Task::ready(Ok(Default::default()));
2464 };
2465 let range = action.range.to_point_utf16(buffer);
2466
2467 cx.spawn(|this, mut cx| async move {
2468 if let Some(lsp_range) = action
2469 .lsp_action
2470 .data
2471 .as_mut()
2472 .and_then(|d| d.get_mut("codeActionParams"))
2473 .and_then(|d| d.get_mut("range"))
2474 {
2475 *lsp_range = serde_json::to_value(&lsp::Range::new(
2476 range.start.to_lsp_position(),
2477 range.end.to_lsp_position(),
2478 ))
2479 .unwrap();
2480 action.lsp_action = lang_server
2481 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2482 .await?;
2483 } else {
2484 let actions = this
2485 .update(&mut cx, |this, cx| {
2486 this.code_actions(&buffer_handle, action.range, cx)
2487 })
2488 .await?;
2489 action.lsp_action = actions
2490 .into_iter()
2491 .find(|a| a.lsp_action.title == action.lsp_action.title)
2492 .ok_or_else(|| anyhow!("code action is outdated"))?
2493 .lsp_action;
2494 }
2495
2496 if let Some(edit) = action.lsp_action.edit {
2497 Self::deserialize_workspace_edit(
2498 this,
2499 edit,
2500 push_to_history,
2501 lang_name,
2502 lang_server,
2503 &mut cx,
2504 )
2505 .await
2506 } else {
2507 Ok(ProjectTransaction::default())
2508 }
2509 })
2510 } else if let Some(project_id) = self.remote_id() {
2511 let client = self.client.clone();
2512 let request = proto::ApplyCodeAction {
2513 project_id,
2514 buffer_id: buffer_handle.read(cx).remote_id(),
2515 action: Some(language::proto::serialize_code_action(&action)),
2516 };
2517 cx.spawn(|this, mut cx| async move {
2518 let response = client
2519 .request(request)
2520 .await?
2521 .transaction
2522 .ok_or_else(|| anyhow!("missing transaction"))?;
2523 this.update(&mut cx, |this, cx| {
2524 this.deserialize_project_transaction(response, push_to_history, cx)
2525 })
2526 .await
2527 })
2528 } else {
2529 Task::ready(Err(anyhow!("project does not have a remote id")))
2530 }
2531 }
2532
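    /// Applies an LSP workspace edit on behalf of a code action or rename.
    /// Resource operations (create, rename, delete) go through the project's
    /// [`Fs`], while text document edits are applied to buffers opened via the
    /// originating language server, yielding one transaction per edited buffer.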
2533 async fn deserialize_workspace_edit(
2534 this: ModelHandle<Self>,
2535 edit: lsp::WorkspaceEdit,
2536 push_to_history: bool,
2537 language_name: Arc<str>,
2538 language_server: Arc<LanguageServer>,
2539 cx: &mut AsyncAppContext,
2540 ) -> Result<ProjectTransaction> {
2541 let fs = this.read_with(cx, |this, _| this.fs.clone());
2542 let mut operations = Vec::new();
2543 if let Some(document_changes) = edit.document_changes {
2544 match document_changes {
2545 lsp::DocumentChanges::Edits(edits) => {
2546 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2547 }
2548 lsp::DocumentChanges::Operations(ops) => operations = ops,
2549 }
2550 } else if let Some(changes) = edit.changes {
2551 operations.extend(changes.into_iter().map(|(uri, edits)| {
2552 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2553 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2554 uri,
2555 version: None,
2556 },
2557 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2558 })
2559 }));
2560 }
2561
2562 let mut project_transaction = ProjectTransaction::default();
2563 for operation in operations {
2564 match operation {
2565 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2566 let abs_path = op
2567 .uri
2568 .to_file_path()
2569 .map_err(|_| anyhow!("can't convert URI to path"))?;
2570
2571 if let Some(parent_path) = abs_path.parent() {
2572 fs.create_dir(parent_path).await?;
2573 }
                    // `Path::ends_with` matches whole components and can never detect a
                    // trailing slash, so inspect the URI to decide whether this create
                    // targets a directory.
                    if op.uri.as_str().ends_with('/') {
2575 fs.create_dir(&abs_path).await?;
2576 } else {
2577 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2578 .await?;
2579 }
2580 }
2581 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2582 let source_abs_path = op
2583 .old_uri
2584 .to_file_path()
2585 .map_err(|_| anyhow!("can't convert URI to path"))?;
2586 let target_abs_path = op
2587 .new_uri
2588 .to_file_path()
2589 .map_err(|_| anyhow!("can't convert URI to path"))?;
2590 fs.rename(
2591 &source_abs_path,
2592 &target_abs_path,
2593 op.options.map(Into::into).unwrap_or_default(),
2594 )
2595 .await?;
2596 }
2597 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2598 let abs_path = op
2599 .uri
2600 .to_file_path()
2601 .map_err(|_| anyhow!("can't convert URI to path"))?;
2602 let options = op.options.map(Into::into).unwrap_or_default();
                    // As above, check the URI for a trailing slash to distinguish
                    // directory deletions from file deletions.
                    if op.uri.as_str().ends_with('/') {
2604 fs.remove_dir(&abs_path, options).await?;
2605 } else {
2606 fs.remove_file(&abs_path, options).await?;
2607 }
2608 }
2609 lsp::DocumentChangeOperation::Edit(op) => {
2610 let buffer_to_edit = this
2611 .update(cx, |this, cx| {
2612 this.open_local_buffer_via_lsp(
2613 op.text_document.uri,
2614 language_name.clone(),
2615 language_server.clone(),
2616 cx,
2617 )
2618 })
2619 .await?;
2620
2621 let edits = this
2622 .update(cx, |this, cx| {
2623 let edits = op.edits.into_iter().map(|edit| match edit {
2624 lsp::OneOf::Left(edit) => edit,
2625 lsp::OneOf::Right(edit) => edit.text_edit,
2626 });
2627 this.edits_from_lsp(
2628 &buffer_to_edit,
2629 edits,
2630 op.text_document.version,
2631 cx,
2632 )
2633 })
2634 .await?;
2635
2636 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2637 buffer.finalize_last_transaction();
2638 buffer.start_transaction();
2639 for (range, text) in edits {
2640 buffer.edit([range], text, cx);
2641 }
2642 let transaction = if buffer.end_transaction(cx).is_some() {
2643 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2644 if !push_to_history {
2645 buffer.forget_transaction(transaction.id);
2646 }
2647 Some(transaction)
2648 } else {
2649 None
2650 };
2651
2652 transaction
2653 });
2654 if let Some(transaction) = transaction {
2655 project_transaction.0.insert(buffer_to_edit, transaction);
2656 }
2657 }
2658 }
2659 }
2660
2661 Ok(project_transaction)
2662 }
2663
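    /// Returns the range of the symbol at `position` that a rename would affect,
    /// if the language server reports one.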
2664 pub fn prepare_rename<T: ToPointUtf16>(
2665 &self,
2666 buffer: ModelHandle<Buffer>,
2667 position: T,
2668 cx: &mut ModelContext<Self>,
2669 ) -> Task<Result<Option<Range<Anchor>>>> {
2670 let position = position.to_point_utf16(buffer.read(cx));
2671 self.request_lsp(buffer, PrepareRename { position }, cx)
2672 }
2673
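    /// Renames the symbol at `position` across the project, returning a
    /// [`ProjectTransaction`] describing the edits made to each affected buffer.
    ///
    /// Illustrative sketch only (assumes `project`, `buffer`, and `cx` are in scope):
    /// ```ignore
    /// let rename = project.update(cx, |project, cx| {
    ///     project.perform_rename(buffer.clone(), PointUtf16::new(2, 4), "new_name".into(), true, cx)
    /// });
    /// let transaction = rename.await?;
    /// ```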
2674 pub fn perform_rename<T: ToPointUtf16>(
2675 &self,
2676 buffer: ModelHandle<Buffer>,
2677 position: T,
2678 new_name: String,
2679 push_to_history: bool,
2680 cx: &mut ModelContext<Self>,
2681 ) -> Task<Result<ProjectTransaction>> {
2682 let position = position.to_point_utf16(buffer.read(cx));
2683 self.request_lsp(
2684 buffer,
2685 PerformRename {
2686 position,
2687 new_name,
2688 push_to_history,
2689 },
2690 cx,
2691 )
2692 }
2693
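    /// Searches the project for ranges matching the given query.
    ///
    /// Locally this runs as a two-stage pipeline on the background executor: one
    /// pool of workers scans the visible files of every local worktree for paths
    /// whose contents may match, the candidate files are opened as buffers, and a
    /// second pool searches each buffer snapshot, producing anchor ranges grouped
    /// by buffer. Already-open buffers are always searched. On remote projects the
    /// query is forwarded to the host and the returned locations are deserialized.
    ///
    /// Illustrative sketch only (assumes a `project` handle and a previously
    /// constructed `query: SearchQuery` are in scope):
    /// ```ignore
    /// let search = project.update(cx, |project, cx| project.search(query, cx));
    /// let matches: HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>> = search.await?;
    /// ```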
2694 pub fn search(
2695 &self,
2696 query: SearchQuery,
2697 cx: &mut ModelContext<Self>,
2698 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2699 if self.is_local() {
2700 let snapshots = self
2701 .visible_worktrees(cx)
2702 .filter_map(|tree| {
2703 let tree = tree.read(cx).as_local()?;
2704 Some(tree.snapshot())
2705 })
2706 .collect::<Vec<_>>();
2707
2708 let background = cx.background().clone();
2709 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2710 if path_count == 0 {
2711 return Task::ready(Ok(Default::default()));
2712 }
2713 let workers = background.num_cpus().min(path_count);
2714 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2715 cx.background()
2716 .spawn({
2717 let fs = self.fs.clone();
2718 let background = cx.background().clone();
2719 let query = query.clone();
2720 async move {
2721 let fs = &fs;
2722 let query = &query;
2723 let matching_paths_tx = &matching_paths_tx;
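                        // Split the combined list of visible paths into contiguous,
                        // equally sized chunks (ceiling division), one per worker.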
2724 let paths_per_worker = (path_count + workers - 1) / workers;
2725 let snapshots = &snapshots;
2726 background
2727 .scoped(|scope| {
2728 for worker_ix in 0..workers {
2729 let worker_start_ix = worker_ix * paths_per_worker;
2730 let worker_end_ix = worker_start_ix + paths_per_worker;
2731 scope.spawn(async move {
2732 let mut snapshot_start_ix = 0;
2733 let mut abs_path = PathBuf::new();
2734 for snapshot in snapshots {
2735 let snapshot_end_ix =
2736 snapshot_start_ix + snapshot.visible_file_count();
2737 if worker_end_ix <= snapshot_start_ix {
2738 break;
2739 } else if worker_start_ix > snapshot_end_ix {
2740 snapshot_start_ix = snapshot_end_ix;
2741 continue;
2742 } else {
2743 let start_in_snapshot = worker_start_ix
2744 .saturating_sub(snapshot_start_ix);
2745 let end_in_snapshot =
2746 cmp::min(worker_end_ix, snapshot_end_ix)
2747 - snapshot_start_ix;
2748
2749 for entry in snapshot
2750 .files(false, start_in_snapshot)
2751 .take(end_in_snapshot - start_in_snapshot)
2752 {
2753 if matching_paths_tx.is_closed() {
2754 break;
2755 }
2756
2757 abs_path.clear();
2758 abs_path.push(&snapshot.abs_path());
2759 abs_path.push(&entry.path);
2760 let matches = if let Some(file) =
2761 fs.open_sync(&abs_path).await.log_err()
2762 {
2763 query.detect(file).unwrap_or(false)
2764 } else {
2765 false
2766 };
2767
2768 if matches {
2769 let project_path =
2770 (snapshot.id(), entry.path.clone());
2771 if matching_paths_tx
2772 .send(project_path)
2773 .await
2774 .is_err()
2775 {
2776 break;
2777 }
2778 }
2779 }
2780
2781 snapshot_start_ix = snapshot_end_ix;
2782 }
2783 }
2784 });
2785 }
2786 })
2787 .await;
2788 }
2789 })
2790 .detach();
2791
2792 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2793 let open_buffers = self
2794 .opened_buffers
2795 .values()
2796 .filter_map(|b| b.upgrade(cx))
2797 .collect::<HashSet<_>>();
2798 cx.spawn(|this, cx| async move {
2799 for buffer in &open_buffers {
2800 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2801 buffers_tx.send((buffer.clone(), snapshot)).await?;
2802 }
2803
2804 let open_buffers = Rc::new(RefCell::new(open_buffers));
2805 while let Some(project_path) = matching_paths_rx.next().await {
2806 if buffers_tx.is_closed() {
2807 break;
2808 }
2809
2810 let this = this.clone();
2811 let open_buffers = open_buffers.clone();
2812 let buffers_tx = buffers_tx.clone();
2813 cx.spawn(|mut cx| async move {
2814 if let Some(buffer) = this
2815 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2816 .await
2817 .log_err()
2818 {
2819 if open_buffers.borrow_mut().insert(buffer.clone()) {
2820 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2821 buffers_tx.send((buffer, snapshot)).await?;
2822 }
2823 }
2824
2825 Ok::<_, anyhow::Error>(())
2826 })
2827 .detach();
2828 }
2829
2830 Ok::<_, anyhow::Error>(())
2831 })
2832 .detach_and_log_err(cx);
2833
2834 let background = cx.background().clone();
2835 cx.background().spawn(async move {
2836 let query = &query;
2837 let mut matched_buffers = Vec::new();
2838 for _ in 0..workers {
2839 matched_buffers.push(HashMap::default());
2840 }
2841 background
2842 .scoped(|scope| {
2843 for worker_matched_buffers in matched_buffers.iter_mut() {
2844 let mut buffers_rx = buffers_rx.clone();
2845 scope.spawn(async move {
2846 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2847 let buffer_matches = query
2848 .search(snapshot.as_rope())
2849 .await
2850 .iter()
2851 .map(|range| {
2852 snapshot.anchor_before(range.start)
2853 ..snapshot.anchor_after(range.end)
2854 })
2855 .collect::<Vec<_>>();
2856 if !buffer_matches.is_empty() {
2857 worker_matched_buffers
2858 .insert(buffer.clone(), buffer_matches);
2859 }
2860 }
2861 });
2862 }
2863 })
2864 .await;
2865 Ok(matched_buffers.into_iter().flatten().collect())
2866 })
2867 } else if let Some(project_id) = self.remote_id() {
2868 let request = self.client.request(query.to_proto(project_id));
2869 cx.spawn(|this, mut cx| async move {
2870 let response = request.await?;
2871 let mut result = HashMap::default();
2872 for location in response.locations {
2873 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2874 let target_buffer = this
2875 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2876 .await?;
2877 let start = location
2878 .start
2879 .and_then(deserialize_anchor)
2880 .ok_or_else(|| anyhow!("missing target start"))?;
2881 let end = location
2882 .end
2883 .and_then(deserialize_anchor)
2884 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2889 }
2890 Ok(result)
2891 })
2892 } else {
2893 Task::ready(Ok(Default::default()))
2894 }
2895 }
2896
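    /// Dispatches a typed [`LspCommand`], either directly to the buffer's language
    /// server (after checking its capabilities) or, on remote projects, to the
    /// host as the command's proto request. Falls back to a default response when
    /// no server is available.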
2897 fn request_lsp<R: LspCommand>(
2898 &self,
2899 buffer_handle: ModelHandle<Buffer>,
2900 request: R,
2901 cx: &mut ModelContext<Self>,
2902 ) -> Task<Result<R::Response>>
2903 where
2904 <R::LspRequest as lsp::request::Request>::Result: Send,
2905 {
2906 let buffer = buffer_handle.read(cx);
2907 if self.is_local() {
2908 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2909 if let Some((file, language_server)) =
2910 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2911 {
2912 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2913 return cx.spawn(|this, cx| async move {
2914 if !request.check_capabilities(&language_server.capabilities()) {
2915 return Ok(Default::default());
2916 }
2917
2918 let response = language_server
2919 .request::<R::LspRequest>(lsp_params)
2920 .await
2921 .context("lsp request failed")?;
2922 request
2923 .response_from_lsp(response, this, buffer_handle, cx)
2924 .await
2925 });
2926 }
2927 } else if let Some(project_id) = self.remote_id() {
2928 let rpc = self.client.clone();
2929 let message = request.to_proto(project_id, buffer);
2930 return cx.spawn(|this, cx| async move {
2931 let response = rpc.request(message).await?;
2932 request
2933 .response_from_proto(response, this, buffer_handle, cx)
2934 .await
2935 });
2936 }
2937 Task::ready(Ok(Default::default()))
2938 }
2939
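    /// Returns the worktree containing `abs_path` along with the path relative to
    /// that worktree's root, creating a new local worktree if none contains it.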
2940 pub fn find_or_create_local_worktree(
2941 &mut self,
2942 abs_path: impl AsRef<Path>,
2943 visible: bool,
2944 cx: &mut ModelContext<Self>,
2945 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2946 let abs_path = abs_path.as_ref();
2947 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2948 Task::ready(Ok((tree.clone(), relative_path.into())))
2949 } else {
2950 let worktree = self.create_local_worktree(abs_path, visible, cx);
2951 cx.foreground()
2952 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2953 }
2954 }
2955
2956 pub fn find_local_worktree(
2957 &self,
2958 abs_path: &Path,
2959 cx: &AppContext,
2960 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2961 for tree in self.worktrees(cx) {
2962 if let Some(relative_path) = tree
2963 .read(cx)
2964 .as_local()
2965 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2966 {
2967 return Some((tree.clone(), relative_path.into()));
2968 }
2969 }
2970 None
2971 }
2972
2973 pub fn is_shared(&self) -> bool {
2974 match &self.client_state {
2975 ProjectClientState::Local { is_shared, .. } => *is_shared,
2976 ProjectClientState::Remote { .. } => false,
2977 }
2978 }
2979
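    /// Creates a local worktree rooted at `abs_path`. Concurrent calls for the
    /// same path share a single loading task via `loading_local_worktrees`; once
    /// loaded, the worktree is added to the project and registered or shared with
    /// the server if the project has a remote id.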
2980 fn create_local_worktree(
2981 &mut self,
2982 abs_path: impl AsRef<Path>,
2983 visible: bool,
2984 cx: &mut ModelContext<Self>,
2985 ) -> Task<Result<ModelHandle<Worktree>>> {
2986 let fs = self.fs.clone();
2987 let client = self.client.clone();
2988 let next_entry_id = self.next_entry_id.clone();
2989 let path: Arc<Path> = abs_path.as_ref().into();
2990 let task = self
2991 .loading_local_worktrees
2992 .entry(path.clone())
2993 .or_insert_with(|| {
2994 cx.spawn(|project, mut cx| {
2995 async move {
2996 let worktree = Worktree::local(
2997 client.clone(),
2998 path.clone(),
2999 visible,
3000 fs,
3001 next_entry_id,
3002 &mut cx,
3003 )
3004 .await;
3005 project.update(&mut cx, |project, _| {
3006 project.loading_local_worktrees.remove(&path);
3007 });
3008 let worktree = worktree?;
3009
3010 let (remote_project_id, is_shared) =
3011 project.update(&mut cx, |project, cx| {
3012 project.add_worktree(&worktree, cx);
3013 (project.remote_id(), project.is_shared())
3014 });
3015
3016 if let Some(project_id) = remote_project_id {
3017 if is_shared {
3018 worktree
3019 .update(&mut cx, |worktree, cx| {
3020 worktree.as_local_mut().unwrap().share(project_id, cx)
3021 })
3022 .await?;
3023 } else {
3024 worktree
3025 .update(&mut cx, |worktree, cx| {
3026 worktree.as_local_mut().unwrap().register(project_id, cx)
3027 })
3028 .await?;
3029 }
3030 }
3031
3032 Ok(worktree)
3033 }
                    .map_err(Arc::new)
3035 })
3036 .shared()
3037 })
3038 .clone();
3039 cx.foreground().spawn(async move {
3040 match task.await {
3041 Ok(worktree) => Ok(worktree),
3042 Err(err) => Err(anyhow!("{}", err)),
3043 }
3044 })
3045 }
3046
3047 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3048 self.worktrees.retain(|worktree| {
3049 worktree
3050 .upgrade(cx)
3051 .map_or(false, |w| w.read(cx).id() != id)
3052 });
3053 cx.notify();
3054 }
3055
3056 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3057 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3058 if worktree.read(cx).is_local() {
3059 cx.subscribe(&worktree, |this, worktree, _, cx| {
3060 this.update_local_worktree_buffers(worktree, cx);
3061 })
3062 .detach();
3063 }
3064
3065 let push_strong_handle = {
3066 let worktree = worktree.read(cx);
3067 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3068 };
3069 if push_strong_handle {
3070 self.worktrees
3071 .push(WorktreeHandle::Strong(worktree.clone()));
3072 } else {
3073 cx.observe_release(&worktree, |this, _, cx| {
3074 this.worktrees
3075 .retain(|worktree| worktree.upgrade(cx).is_some());
3076 cx.notify();
3077 })
3078 .detach();
3079 self.worktrees
3080 .push(WorktreeHandle::Weak(worktree.downgrade()));
3081 }
3082 cx.notify();
3083 }
3084
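    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's `File` is refreshed (tracking renames by entry id, falling back to
    /// a path lookup, or marking the file as deleted when no entry remains), the
    /// new file is broadcast to collaborators, and buffers whose handles have been
    /// dropped are removed from `opened_buffers`.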
3085 fn update_local_worktree_buffers(
3086 &mut self,
3087 worktree_handle: ModelHandle<Worktree>,
3088 cx: &mut ModelContext<Self>,
3089 ) {
3090 let snapshot = worktree_handle.read(cx).snapshot();
3091 let mut buffers_to_delete = Vec::new();
3092 for (buffer_id, buffer) in &self.opened_buffers {
3093 if let Some(buffer) = buffer.upgrade(cx) {
3094 buffer.update(cx, |buffer, cx| {
3095 if let Some(old_file) = File::from_dyn(buffer.file()) {
3096 if old_file.worktree != worktree_handle {
3097 return;
3098 }
3099
3100 let new_file = if let Some(entry) = old_file
3101 .entry_id
3102 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3103 {
3104 File {
3105 is_local: true,
3106 entry_id: Some(entry.id),
3107 mtime: entry.mtime,
3108 path: entry.path.clone(),
3109 worktree: worktree_handle.clone(),
3110 }
3111 } else if let Some(entry) =
3112 snapshot.entry_for_path(old_file.path().as_ref())
3113 {
3114 File {
3115 is_local: true,
3116 entry_id: Some(entry.id),
3117 mtime: entry.mtime,
3118 path: entry.path.clone(),
3119 worktree: worktree_handle.clone(),
3120 }
3121 } else {
3122 File {
3123 is_local: true,
3124 entry_id: None,
3125 path: old_file.path().clone(),
3126 mtime: old_file.mtime(),
3127 worktree: worktree_handle.clone(),
3128 }
3129 };
3130
3131 if let Some(project_id) = self.remote_id() {
3132 self.client
3133 .send(proto::UpdateBufferFile {
3134 project_id,
3135 buffer_id: *buffer_id as u64,
3136 file: Some(new_file.to_proto()),
3137 })
3138 .log_err();
3139 }
3140 buffer.file_updated(Box::new(new_file), cx).detach();
3141 }
3142 });
3143 } else {
3144 buffers_to_delete.push(*buffer_id);
3145 }
3146 }
3147
3148 for buffer_id in buffers_to_delete {
3149 self.opened_buffers.remove(&buffer_id);
3150 }
3151 }
3152
3153 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3154 let new_active_entry = entry.and_then(|project_path| {
3155 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3156 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3157 Some(ProjectEntry {
3158 worktree_id: project_path.worktree_id,
3159 entry_id: entry.id,
3160 })
3161 });
3162 if new_active_entry != self.active_entry {
3163 self.active_entry = new_active_entry;
3164 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3165 }
3166 }
3167
3168 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3169 self.language_servers_with_diagnostics_running > 0
3170 }
3171
3172 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3173 let mut summary = DiagnosticSummary::default();
3174 for (_, path_summary) in self.diagnostic_summaries(cx) {
3175 summary.error_count += path_summary.error_count;
3176 summary.warning_count += path_summary.warning_count;
3177 summary.info_count += path_summary.info_count;
3178 summary.hint_count += path_summary.hint_count;
3179 }
3180 summary
3181 }
3182
3183 pub fn diagnostic_summaries<'a>(
3184 &'a self,
3185 cx: &'a AppContext,
3186 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3187 self.worktrees(cx).flat_map(move |worktree| {
3188 let worktree = worktree.read(cx);
3189 let worktree_id = worktree.id();
3190 worktree
3191 .diagnostic_summaries()
3192 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3193 })
3194 }
3195
3196 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3197 self.language_servers_with_diagnostics_running += 1;
3198 if self.language_servers_with_diagnostics_running == 1 {
3199 cx.emit(Event::DiskBasedDiagnosticsStarted);
3200 }
3201 }
3202
3203 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3204 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3205 self.language_servers_with_diagnostics_running -= 1;
3206 if self.language_servers_with_diagnostics_running == 0 {
3207 cx.emit(Event::DiskBasedDiagnosticsFinished);
3208 }
3209 }
3210
3211 pub fn active_entry(&self) -> Option<ProjectEntry> {
3212 self.active_entry
3213 }
3214
3215 // RPC message handlers
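    //
    // Each handler below runs in response to a proto message from a collaborator
    // (or from the host), updating this project's state and, for request
    // messages, producing the corresponding response payload.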
3216
3217 async fn handle_unshare_project(
3218 this: ModelHandle<Self>,
3219 _: TypedEnvelope<proto::UnshareProject>,
3220 _: Arc<Client>,
3221 mut cx: AsyncAppContext,
3222 ) -> Result<()> {
3223 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3224 Ok(())
3225 }
3226
3227 async fn handle_add_collaborator(
3228 this: ModelHandle<Self>,
3229 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3230 _: Arc<Client>,
3231 mut cx: AsyncAppContext,
3232 ) -> Result<()> {
3233 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3234 let collaborator = envelope
3235 .payload
3236 .collaborator
3237 .take()
3238 .ok_or_else(|| anyhow!("empty collaborator"))?;
3239
3240 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3241 this.update(&mut cx, |this, cx| {
3242 this.collaborators
3243 .insert(collaborator.peer_id, collaborator);
3244 cx.notify();
3245 });
3246
3247 Ok(())
3248 }
3249
3250 async fn handle_remove_collaborator(
3251 this: ModelHandle<Self>,
3252 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3253 _: Arc<Client>,
3254 mut cx: AsyncAppContext,
3255 ) -> Result<()> {
3256 this.update(&mut cx, |this, cx| {
3257 let peer_id = PeerId(envelope.payload.peer_id);
3258 let replica_id = this
3259 .collaborators
3260 .remove(&peer_id)
3261 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3262 .replica_id;
3263 for (_, buffer) in &this.opened_buffers {
3264 if let Some(buffer) = buffer.upgrade(cx) {
3265 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3266 }
3267 }
3268 cx.notify();
3269 Ok(())
3270 })
3271 }
3272
3273 async fn handle_register_worktree(
3274 this: ModelHandle<Self>,
3275 envelope: TypedEnvelope<proto::RegisterWorktree>,
3276 client: Arc<Client>,
3277 mut cx: AsyncAppContext,
3278 ) -> Result<()> {
3279 this.update(&mut cx, |this, cx| {
3280 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3281 let replica_id = this.replica_id();
3282 let worktree = proto::Worktree {
3283 id: envelope.payload.worktree_id,
3284 root_name: envelope.payload.root_name,
3285 entries: Default::default(),
3286 diagnostic_summaries: Default::default(),
3287 visible: envelope.payload.visible,
3288 };
3289 let (worktree, load_task) =
3290 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3291 this.add_worktree(&worktree, cx);
3292 load_task.detach();
3293 Ok(())
3294 })
3295 }
3296
3297 async fn handle_unregister_worktree(
3298 this: ModelHandle<Self>,
3299 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3300 _: Arc<Client>,
3301 mut cx: AsyncAppContext,
3302 ) -> Result<()> {
3303 this.update(&mut cx, |this, cx| {
3304 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3305 this.remove_worktree(worktree_id, cx);
3306 Ok(())
3307 })
3308 }
3309
3310 async fn handle_update_worktree(
3311 this: ModelHandle<Self>,
3312 envelope: TypedEnvelope<proto::UpdateWorktree>,
3313 _: Arc<Client>,
3314 mut cx: AsyncAppContext,
3315 ) -> Result<()> {
3316 this.update(&mut cx, |this, cx| {
3317 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3318 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3319 worktree.update(cx, |worktree, _| {
3320 let worktree = worktree.as_remote_mut().unwrap();
3321 worktree.update_from_remote(envelope)
3322 })?;
3323 }
3324 Ok(())
3325 })
3326 }
3327
3328 async fn handle_update_diagnostic_summary(
3329 this: ModelHandle<Self>,
3330 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3331 _: Arc<Client>,
3332 mut cx: AsyncAppContext,
3333 ) -> Result<()> {
3334 this.update(&mut cx, |this, cx| {
3335 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3336 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3337 if let Some(summary) = envelope.payload.summary {
3338 let project_path = ProjectPath {
3339 worktree_id,
3340 path: Path::new(&summary.path).into(),
3341 };
3342 worktree.update(cx, |worktree, _| {
3343 worktree
3344 .as_remote_mut()
3345 .unwrap()
3346 .update_diagnostic_summary(project_path.path.clone(), &summary);
3347 });
3348 cx.emit(Event::DiagnosticsUpdated(project_path));
3349 }
3350 }
3351 Ok(())
3352 })
3353 }
3354
3355 async fn handle_start_language_server(
3356 this: ModelHandle<Self>,
3357 envelope: TypedEnvelope<proto::StartLanguageServer>,
3358 _: Arc<Client>,
3359 mut cx: AsyncAppContext,
3360 ) -> Result<()> {
3361 let server = envelope
3362 .payload
3363 .server
3364 .ok_or_else(|| anyhow!("invalid server"))?;
3365 this.update(&mut cx, |this, cx| {
3366 this.language_server_statuses.insert(
3367 server.id as usize,
3368 LanguageServerStatus {
3369 name: server.name,
3370 pending_work: Default::default(),
3371 pending_diagnostic_updates: 0,
3372 },
3373 );
3374 cx.notify();
3375 });
3376 Ok(())
3377 }
3378
3379 async fn handle_update_language_server(
3380 this: ModelHandle<Self>,
3381 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3382 _: Arc<Client>,
3383 mut cx: AsyncAppContext,
3384 ) -> Result<()> {
3385 let language_server_id = envelope.payload.language_server_id as usize;
3386 match envelope
3387 .payload
3388 .variant
3389 .ok_or_else(|| anyhow!("invalid variant"))?
3390 {
3391 proto::update_language_server::Variant::WorkStart(payload) => {
3392 this.update(&mut cx, |this, cx| {
3393 this.on_lsp_work_start(language_server_id, payload.token, cx);
3394 })
3395 }
3396 proto::update_language_server::Variant::WorkProgress(payload) => {
3397 this.update(&mut cx, |this, cx| {
3398 this.on_lsp_work_progress(
3399 language_server_id,
3400 payload.token,
3401 LanguageServerProgress {
3402 message: payload.message,
3403 percentage: payload.percentage.map(|p| p as usize),
3404 last_update_at: Instant::now(),
3405 },
3406 cx,
3407 );
3408 })
3409 }
3410 proto::update_language_server::Variant::WorkEnd(payload) => {
3411 this.update(&mut cx, |this, cx| {
3412 this.on_lsp_work_end(language_server_id, payload.token, cx);
3413 })
3414 }
3415 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3416 this.update(&mut cx, |this, cx| {
3417 this.disk_based_diagnostics_started(cx);
3418 })
3419 }
3420 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3421 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3422 }
3423 }
3424
3425 Ok(())
3426 }
3427
3428 async fn handle_update_buffer(
3429 this: ModelHandle<Self>,
3430 envelope: TypedEnvelope<proto::UpdateBuffer>,
3431 _: Arc<Client>,
3432 mut cx: AsyncAppContext,
3433 ) -> Result<()> {
3434 this.update(&mut cx, |this, cx| {
3435 let payload = envelope.payload.clone();
3436 let buffer_id = payload.buffer_id;
3437 let ops = payload
3438 .operations
3439 .into_iter()
3440 .map(|op| language::proto::deserialize_operation(op))
3441 .collect::<Result<Vec<_>, _>>()?;
3442 match this.opened_buffers.entry(buffer_id) {
3443 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3444 OpenBuffer::Strong(buffer) => {
3445 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3446 }
3447 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3448 OpenBuffer::Weak(_) => {}
3449 },
3450 hash_map::Entry::Vacant(e) => {
3451 e.insert(OpenBuffer::Loading(ops));
3452 }
3453 }
3454 Ok(())
3455 })
3456 }
3457
3458 async fn handle_update_buffer_file(
3459 this: ModelHandle<Self>,
3460 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3461 _: Arc<Client>,
3462 mut cx: AsyncAppContext,
3463 ) -> Result<()> {
3464 this.update(&mut cx, |this, cx| {
3465 let payload = envelope.payload.clone();
3466 let buffer_id = payload.buffer_id;
3467 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3468 let worktree = this
3469 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3470 .ok_or_else(|| anyhow!("no such worktree"))?;
3471 let file = File::from_proto(file, worktree.clone(), cx)?;
3472 let buffer = this
3473 .opened_buffers
3474 .get_mut(&buffer_id)
3475 .and_then(|b| b.upgrade(cx))
3476 .ok_or_else(|| anyhow!("no such buffer"))?;
3477 buffer.update(cx, |buffer, cx| {
3478 buffer.file_updated(Box::new(file), cx).detach();
3479 });
3480 Ok(())
3481 })
3482 }
3483
3484 async fn handle_save_buffer(
3485 this: ModelHandle<Self>,
3486 envelope: TypedEnvelope<proto::SaveBuffer>,
3487 _: Arc<Client>,
3488 mut cx: AsyncAppContext,
3489 ) -> Result<proto::BufferSaved> {
3490 let buffer_id = envelope.payload.buffer_id;
3491 let requested_version = deserialize_version(envelope.payload.version);
3492
3493 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3494 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3495 let buffer = this
3496 .opened_buffers
3497 .get(&buffer_id)
3498 .map(|buffer| buffer.upgrade(cx).unwrap())
3499 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3500 Ok::<_, anyhow::Error>((project_id, buffer))
3501 })?;
3502 buffer
3503 .update(&mut cx, |buffer, _| {
3504 buffer.wait_for_version(requested_version)
3505 })
3506 .await;
3507
3508 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3509 Ok(proto::BufferSaved {
3510 project_id,
3511 buffer_id,
3512 version: serialize_version(&saved_version),
3513 mtime: Some(mtime.into()),
3514 })
3515 }
3516
3517 async fn handle_format_buffers(
3518 this: ModelHandle<Self>,
3519 envelope: TypedEnvelope<proto::FormatBuffers>,
3520 _: Arc<Client>,
3521 mut cx: AsyncAppContext,
3522 ) -> Result<proto::FormatBuffersResponse> {
3523 let sender_id = envelope.original_sender_id()?;
3524 let format = this.update(&mut cx, |this, cx| {
3525 let mut buffers = HashSet::default();
3526 for buffer_id in &envelope.payload.buffer_ids {
3527 buffers.insert(
3528 this.opened_buffers
3529 .get(buffer_id)
3530 .map(|buffer| buffer.upgrade(cx).unwrap())
3531 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3532 );
3533 }
3534 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3535 })?;
3536
3537 let project_transaction = format.await?;
3538 let project_transaction = this.update(&mut cx, |this, cx| {
3539 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3540 });
3541 Ok(proto::FormatBuffersResponse {
3542 transaction: Some(project_transaction),
3543 })
3544 }
3545
3546 async fn handle_get_completions(
3547 this: ModelHandle<Self>,
3548 envelope: TypedEnvelope<proto::GetCompletions>,
3549 _: Arc<Client>,
3550 mut cx: AsyncAppContext,
3551 ) -> Result<proto::GetCompletionsResponse> {
3552 let position = envelope
3553 .payload
3554 .position
3555 .and_then(language::proto::deserialize_anchor)
3556 .ok_or_else(|| anyhow!("invalid position"))?;
3557 let version = deserialize_version(envelope.payload.version);
3558 let buffer = this.read_with(&cx, |this, cx| {
3559 this.opened_buffers
3560 .get(&envelope.payload.buffer_id)
3561 .map(|buffer| buffer.upgrade(cx).unwrap())
3562 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3563 })?;
3564 buffer
3565 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3566 .await;
3567 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3568 let completions = this
3569 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3570 .await?;
3571
3572 Ok(proto::GetCompletionsResponse {
3573 completions: completions
3574 .iter()
3575 .map(language::proto::serialize_completion)
3576 .collect(),
3577 version: serialize_version(&version),
3578 })
3579 }
3580
3581 async fn handle_apply_additional_edits_for_completion(
3582 this: ModelHandle<Self>,
3583 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3584 _: Arc<Client>,
3585 mut cx: AsyncAppContext,
3586 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3587 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3588 let buffer = this
3589 .opened_buffers
3590 .get(&envelope.payload.buffer_id)
3591 .map(|buffer| buffer.upgrade(cx).unwrap())
3592 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3593 let language = buffer.read(cx).language();
3594 let completion = language::proto::deserialize_completion(
3595 envelope
3596 .payload
3597 .completion
3598 .ok_or_else(|| anyhow!("invalid completion"))?,
3599 language,
3600 )?;
3601 Ok::<_, anyhow::Error>(
3602 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3603 )
3604 })?;
3605
3606 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3607 transaction: apply_additional_edits
3608 .await?
3609 .as_ref()
3610 .map(language::proto::serialize_transaction),
3611 })
3612 }
3613
3614 async fn handle_get_code_actions(
3615 this: ModelHandle<Self>,
3616 envelope: TypedEnvelope<proto::GetCodeActions>,
3617 _: Arc<Client>,
3618 mut cx: AsyncAppContext,
3619 ) -> Result<proto::GetCodeActionsResponse> {
3620 let start = envelope
3621 .payload
3622 .start
3623 .and_then(language::proto::deserialize_anchor)
3624 .ok_or_else(|| anyhow!("invalid start"))?;
3625 let end = envelope
3626 .payload
3627 .end
3628 .and_then(language::proto::deserialize_anchor)
3629 .ok_or_else(|| anyhow!("invalid end"))?;
3630 let buffer = this.update(&mut cx, |this, cx| {
3631 this.opened_buffers
3632 .get(&envelope.payload.buffer_id)
3633 .map(|buffer| buffer.upgrade(cx).unwrap())
3634 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3635 })?;
3636 buffer
3637 .update(&mut cx, |buffer, _| {
3638 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3639 })
3640 .await;
3641
3642 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3643 let code_actions = this.update(&mut cx, |this, cx| {
3644 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3645 })?;
3646
3647 Ok(proto::GetCodeActionsResponse {
3648 actions: code_actions
3649 .await?
3650 .iter()
3651 .map(language::proto::serialize_code_action)
3652 .collect(),
3653 version: serialize_version(&version),
3654 })
3655 }
3656
3657 async fn handle_apply_code_action(
3658 this: ModelHandle<Self>,
3659 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3660 _: Arc<Client>,
3661 mut cx: AsyncAppContext,
3662 ) -> Result<proto::ApplyCodeActionResponse> {
3663 let sender_id = envelope.original_sender_id()?;
3664 let action = language::proto::deserialize_code_action(
3665 envelope
3666 .payload
3667 .action
3668 .ok_or_else(|| anyhow!("invalid action"))?,
3669 )?;
3670 let apply_code_action = this.update(&mut cx, |this, cx| {
3671 let buffer = this
3672 .opened_buffers
3673 .get(&envelope.payload.buffer_id)
3674 .map(|buffer| buffer.upgrade(cx).unwrap())
3675 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3676 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3677 })?;
3678
3679 let project_transaction = apply_code_action.await?;
3680 let project_transaction = this.update(&mut cx, |this, cx| {
3681 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3682 });
3683 Ok(proto::ApplyCodeActionResponse {
3684 transaction: Some(project_transaction),
3685 })
3686 }
3687
3688 async fn handle_lsp_command<T: LspCommand>(
3689 this: ModelHandle<Self>,
3690 envelope: TypedEnvelope<T::ProtoRequest>,
3691 _: Arc<Client>,
3692 mut cx: AsyncAppContext,
3693 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3694 where
3695 <T::LspRequest as lsp::request::Request>::Result: Send,
3696 {
3697 let sender_id = envelope.original_sender_id()?;
3698 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3699 let buffer_handle = this.read_with(&cx, |this, _| {
3700 this.opened_buffers
3701 .get(&buffer_id)
3702 .map(|buffer| buffer.upgrade(&cx).unwrap())
3703 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3704 })?;
3705 let request = T::from_proto(
3706 envelope.payload,
3707 this.clone(),
3708 buffer_handle.clone(),
3709 cx.clone(),
3710 )
3711 .await?;
3712 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3713 let response = this
3714 .update(&mut cx, |this, cx| {
3715 this.request_lsp(buffer_handle, request, cx)
3716 })
3717 .await?;
3718 this.update(&mut cx, |this, cx| {
3719 Ok(T::response_to_proto(
3720 response,
3721 this,
3722 sender_id,
3723 &buffer_version,
3724 cx,
3725 ))
3726 })
3727 }
3728
3729 async fn handle_get_project_symbols(
3730 this: ModelHandle<Self>,
3731 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3732 _: Arc<Client>,
3733 mut cx: AsyncAppContext,
3734 ) -> Result<proto::GetProjectSymbolsResponse> {
3735 let symbols = this
3736 .update(&mut cx, |this, cx| {
3737 this.symbols(&envelope.payload.query, cx)
3738 })
3739 .await?;
3740
3741 Ok(proto::GetProjectSymbolsResponse {
3742 symbols: symbols.iter().map(serialize_symbol).collect(),
3743 })
3744 }
3745
3746 async fn handle_search_project(
3747 this: ModelHandle<Self>,
3748 envelope: TypedEnvelope<proto::SearchProject>,
3749 _: Arc<Client>,
3750 mut cx: AsyncAppContext,
3751 ) -> Result<proto::SearchProjectResponse> {
3752 let peer_id = envelope.original_sender_id()?;
3753 let query = SearchQuery::from_proto(envelope.payload)?;
3754 let result = this
3755 .update(&mut cx, |this, cx| this.search(query, cx))
3756 .await?;
3757
3758 this.update(&mut cx, |this, cx| {
3759 let mut locations = Vec::new();
3760 for (buffer, ranges) in result {
3761 for range in ranges {
3762 let start = serialize_anchor(&range.start);
3763 let end = serialize_anchor(&range.end);
3764 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3765 locations.push(proto::Location {
3766 buffer: Some(buffer),
3767 start: Some(start),
3768 end: Some(end),
3769 });
3770 }
3771 }
3772 Ok(proto::SearchProjectResponse { locations })
3773 })
3774 }
3775
3776 async fn handle_open_buffer_for_symbol(
3777 this: ModelHandle<Self>,
3778 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3779 _: Arc<Client>,
3780 mut cx: AsyncAppContext,
3781 ) -> Result<proto::OpenBufferForSymbolResponse> {
3782 let peer_id = envelope.original_sender_id()?;
3783 let symbol = envelope
3784 .payload
3785 .symbol
3786 .ok_or_else(|| anyhow!("invalid symbol"))?;
3787 let symbol = this.read_with(&cx, |this, _| {
3788 let symbol = this.deserialize_symbol(symbol)?;
3789 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3790 if signature == symbol.signature {
3791 Ok(symbol)
3792 } else {
3793 Err(anyhow!("invalid symbol signature"))
3794 }
3795 })?;
3796 let buffer = this
3797 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3798 .await?;
3799
3800 Ok(proto::OpenBufferForSymbolResponse {
3801 buffer: Some(this.update(&mut cx, |this, cx| {
3802 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3803 })),
3804 })
3805 }
3806
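    // Hashes the worktree id, the symbol's path, and this project's random
    // nonce. Peers echo this signature back with a symbol, which lets
    // `handle_open_buffer_for_symbol` reject symbols this project never
    // handed out.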
3807 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3808 let mut hasher = Sha256::new();
3809 hasher.update(worktree_id.to_proto().to_be_bytes());
3810 hasher.update(path.to_string_lossy().as_bytes());
3811 hasher.update(self.nonce.to_be_bytes());
3812 hasher.finalize().as_slice().try_into().unwrap()
3813 }
3814
3815 async fn handle_open_buffer(
3816 this: ModelHandle<Self>,
3817 envelope: TypedEnvelope<proto::OpenBuffer>,
3818 _: Arc<Client>,
3819 mut cx: AsyncAppContext,
3820 ) -> Result<proto::OpenBufferResponse> {
3821 let peer_id = envelope.original_sender_id()?;
3822 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3823 let open_buffer = this.update(&mut cx, |this, cx| {
3824 this.open_buffer(
3825 ProjectPath {
3826 worktree_id,
3827 path: PathBuf::from(envelope.payload.path).into(),
3828 },
3829 cx,
3830 )
3831 });
3832
3833 let buffer = open_buffer.await?;
3834 this.update(&mut cx, |this, cx| {
3835 Ok(proto::OpenBufferResponse {
3836 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3837 })
3838 })
3839 }
3840
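    // Converts a project transaction into its wire form, sending each affected
    // buffer to the peer along with the serialized transaction applied to it.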
3841 fn serialize_project_transaction_for_peer(
3842 &mut self,
3843 project_transaction: ProjectTransaction,
3844 peer_id: PeerId,
3845 cx: &AppContext,
3846 ) -> proto::ProjectTransaction {
3847 let mut serialized_transaction = proto::ProjectTransaction {
3848 buffers: Default::default(),
3849 transactions: Default::default(),
3850 };
3851 for (buffer, transaction) in project_transaction.0 {
3852 serialized_transaction
3853 .buffers
3854 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3855 serialized_transaction
3856 .transactions
3857 .push(language::proto::serialize_transaction(&transaction));
3858 }
3859 serialized_transaction
3860 }
3861
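    // The inverse of `serialize_project_transaction_for_peer`: resolves each
    // buffer, waits for the transaction's edits to arrive, and optionally
    // pushes the transaction onto the buffer's undo history.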
3862 fn deserialize_project_transaction(
3863 &mut self,
3864 message: proto::ProjectTransaction,
3865 push_to_history: bool,
3866 cx: &mut ModelContext<Self>,
3867 ) -> Task<Result<ProjectTransaction>> {
3868 cx.spawn(|this, mut cx| async move {
3869 let mut project_transaction = ProjectTransaction::default();
3870 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3871 let buffer = this
3872 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3873 .await?;
3874 let transaction = language::proto::deserialize_transaction(transaction)?;
3875 project_transaction.0.insert(buffer, transaction);
3876 }
3877
3878 for (buffer, transaction) in &project_transaction.0 {
3879 buffer
3880 .update(&mut cx, |buffer, _| {
3881 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3882 })
3883 .await;
3884
3885 if push_to_history {
3886 buffer.update(&mut cx, |buffer, _| {
3887 buffer.push_transaction(transaction.clone(), Instant::now());
3888 });
3889 }
3890 }
3891
3892 Ok(project_transaction)
3893 })
3894 }
3895
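    // The first time a buffer is sent to a given peer, its full state is
    // included; afterwards only its remote id is sent.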
3896 fn serialize_buffer_for_peer(
3897 &mut self,
3898 buffer: &ModelHandle<Buffer>,
3899 peer_id: PeerId,
3900 cx: &AppContext,
3901 ) -> proto::Buffer {
3902 let buffer_id = buffer.read(cx).remote_id();
3903 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3904 if shared_buffers.insert(buffer_id) {
3905 proto::Buffer {
3906 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3907 }
3908 } else {
3909 proto::Buffer {
3910 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3911 }
3912 }
3913 }
3914
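    // Resolves a buffer received from a peer. An id-only message waits for the
    // referenced buffer to be registered locally, while a full-state message
    // constructs and registers a new buffer model.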
3915 fn deserialize_buffer(
3916 &mut self,
3917 buffer: proto::Buffer,
3918 cx: &mut ModelContext<Self>,
3919 ) -> Task<Result<ModelHandle<Buffer>>> {
3920 let replica_id = self.replica_id();
3921
3922 let opened_buffer_tx = self.opened_buffer.0.clone();
3923 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3924 cx.spawn(|this, mut cx| async move {
3925 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3926 proto::buffer::Variant::Id(id) => {
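                    // Wait until a buffer with this id has been registered,
                    // re-checking whenever a new buffer is opened.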
3927 let buffer = loop {
3928 let buffer = this.read_with(&cx, |this, cx| {
3929 this.opened_buffers
3930 .get(&id)
3931 .and_then(|buffer| buffer.upgrade(cx))
3932 });
3933 if let Some(buffer) = buffer {
3934 break buffer;
3935 }
3936 opened_buffer_rx
3937 .next()
3938 .await
3939 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3940 };
3941 Ok(buffer)
3942 }
3943 proto::buffer::Variant::State(mut buffer) => {
3944 let mut buffer_worktree = None;
3945 let mut buffer_file = None;
3946 if let Some(file) = buffer.file.take() {
3947 this.read_with(&cx, |this, cx| {
3948 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3949 let worktree =
3950 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3951 anyhow!("no worktree found for id {}", file.worktree_id)
3952 })?;
3953 buffer_file =
3954 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3955 as Box<dyn language::File>);
3956 buffer_worktree = Some(worktree);
3957 Ok::<_, anyhow::Error>(())
3958 })?;
3959 }
3960
3961 let buffer = cx.add_model(|cx| {
3962 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3963 });
3964
3965 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3966
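                    // Wake any tasks in the `Variant::Id` branch above that are
                    // waiting for a newly registered buffer.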
3967 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3968 Ok(buffer)
3969 }
3970 }
3971 })
3972 }
3973
3974 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3975 let language = self
3976 .languages
3977 .get_language(&serialized_symbol.language_name);
3978 let start = serialized_symbol
3979 .start
3980 .ok_or_else(|| anyhow!("invalid start"))?;
3981 let end = serialized_symbol
3982 .end
3983 .ok_or_else(|| anyhow!("invalid end"))?;
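        // The symbol kind is transmitted as its raw integer value; the inverse
        // conversion happens in `serialize_symbol` below.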
3984 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3985 Ok(Symbol {
3986 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3987 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3988 language_name: serialized_symbol.language_name.clone(),
3989 label: language
3990 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3991 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3992 name: serialized_symbol.name,
3993 path: PathBuf::from(serialized_symbol.path),
3994 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3995 kind,
3996 signature: serialized_symbol
3997 .signature
3998 .try_into()
3999 .map_err(|_| anyhow!("invalid signature"))?,
4000 })
4001 }
4002
4003 async fn handle_buffer_saved(
4004 this: ModelHandle<Self>,
4005 envelope: TypedEnvelope<proto::BufferSaved>,
4006 _: Arc<Client>,
4007 mut cx: AsyncAppContext,
4008 ) -> Result<()> {
4009 let version = deserialize_version(envelope.payload.version);
4010 let mtime = envelope
4011 .payload
4012 .mtime
4013 .ok_or_else(|| anyhow!("missing mtime"))?
4014 .into();
4015
4016 this.update(&mut cx, |this, cx| {
4017 let buffer = this
4018 .opened_buffers
4019 .get(&envelope.payload.buffer_id)
4020 .and_then(|buffer| buffer.upgrade(cx));
4021 if let Some(buffer) = buffer {
4022 buffer.update(cx, |buffer, cx| {
4023 buffer.did_save(version, mtime, None, cx);
4024 });
4025 }
4026 Ok(())
4027 })
4028 }
4029
4030 async fn handle_buffer_reloaded(
4031 this: ModelHandle<Self>,
4032 envelope: TypedEnvelope<proto::BufferReloaded>,
4033 _: Arc<Client>,
4034 mut cx: AsyncAppContext,
4035 ) -> Result<()> {
4036 let payload = envelope.payload.clone();
4037 let version = deserialize_version(payload.version);
4038 let mtime = payload
4039 .mtime
4040 .ok_or_else(|| anyhow!("missing mtime"))?
4041 .into();
4042 this.update(&mut cx, |this, cx| {
4043 let buffer = this
4044 .opened_buffers
4045 .get(&payload.buffer_id)
4046 .and_then(|buffer| buffer.upgrade(cx));
4047 if let Some(buffer) = buffer {
4048 buffer.update(cx, |buffer, cx| {
4049 buffer.did_reload(version, mtime, cx);
4050 });
4051 }
4052 Ok(())
4053 })
4054 }
4055
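    // Fuzzy-matches `query` against the paths of all visible worktrees,
    // returning a future that runs the matcher on the background executor.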
4056 pub fn match_paths<'a>(
4057 &self,
4058 query: &'a str,
4059 include_ignored: bool,
4060 smart_case: bool,
4061 max_results: usize,
4062 cancel_flag: &'a AtomicBool,
4063 cx: &AppContext,
4064 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4065 let worktrees = self
4066 .worktrees(cx)
4067 .filter(|worktree| worktree.read(cx).is_visible())
4068 .collect::<Vec<_>>();
4069 let include_root_name = worktrees.len() > 1;
4070 let candidate_sets = worktrees
4071 .into_iter()
4072 .map(|worktree| CandidateSet {
4073 snapshot: worktree.read(cx).snapshot(),
4074 include_ignored,
4075 include_root_name,
4076 })
4077 .collect::<Vec<_>>();
4078
4079 let background = cx.background().clone();
4080 async move {
4081 fuzzy::match_paths(
4082 candidate_sets.as_slice(),
4083 query,
4084 smart_case,
4085 max_results,
4086 cancel_flag,
4087 background,
4088 )
4089 .await
4090 }
4091 }
4092
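    // Translates a batch of LSP text edits into anchor ranges against the
    // buffer snapshot corresponding to `version`, diffing multi-line edits so
    // that anchors in unchanged regions keep their positions.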
4093 fn edits_from_lsp(
4094 &mut self,
4095 buffer: &ModelHandle<Buffer>,
4096 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4097 version: Option<i32>,
4098 cx: &mut ModelContext<Self>,
4099 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4100 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4101 cx.background().spawn(async move {
4102 let snapshot = snapshot?;
4103 let mut lsp_edits = lsp_edits
4104 .into_iter()
4105 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4106 .peekable();
4107
4108 let mut edits = Vec::new();
4109 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4110 // Combine any LSP edits that are adjacent.
4111 //
4112 // Also, combine LSP edits that are separated from each other by only
4113 // a newline. This is important because for some code actions,
4114 // Rust-analyzer rewrites the entire buffer via a series of edits that
4115 // are separated by unchanged newline characters.
4116 //
4117 // In order for the diffing logic below to work properly, any edits that
4118 // cancel each other out must be combined into one.
4119 while let Some((next_range, next_text)) = lsp_edits.peek() {
4120 if next_range.start > range.end {
4121 if next_range.start.row > range.end.row + 1
4122 || next_range.start.column > 0
4123 || snapshot.clip_point_utf16(
4124 PointUtf16::new(range.end.row, u32::MAX),
4125 Bias::Left,
4126 ) > range.end
4127 {
4128 break;
4129 }
4130 new_text.push('\n');
4131 }
4132 range.end = next_range.end;
4133 new_text.push_str(&next_text);
4134 lsp_edits.next();
4135 }
4136
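                // Reject edits whose positions don't exist in the snapshot they
                // were computed against.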
4137 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4138 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4139 {
4140 return Err(anyhow!("invalid edits received from language server"));
4141 }
4142
4143 // For multiline edits, perform a diff of the old and new text so that
4144 // we can identify the changes more precisely, preserving the locations
4145 // of any anchors positioned in the unchanged regions.
4146 if range.end.row > range.start.row {
4147 let mut offset = range.start.to_offset(&snapshot);
4148 let old_text = snapshot.text_for_range(range).collect::<String>();
4149
4150 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4151 let mut moved_since_edit = true;
4152 for change in diff.iter_all_changes() {
4153 let tag = change.tag();
4154 let value = change.value();
4155 match tag {
4156 ChangeTag::Equal => {
4157 offset += value.len();
4158 moved_since_edit = true;
4159 }
4160 ChangeTag::Delete => {
4161 let start = snapshot.anchor_after(offset);
4162 let end = snapshot.anchor_before(offset + value.len());
4163 if moved_since_edit {
4164 edits.push((start..end, String::new()));
4165 } else {
4166 edits.last_mut().unwrap().0.end = end;
4167 }
4168 offset += value.len();
4169 moved_since_edit = false;
4170 }
4171 ChangeTag::Insert => {
4172 if moved_since_edit {
4173 let anchor = snapshot.anchor_after(offset);
4174 edits.push((anchor.clone()..anchor, value.to_string()));
4175 } else {
4176 edits.last_mut().unwrap().1.push_str(value);
4177 }
4178 moved_since_edit = false;
4179 }
4180 }
4181 }
4182 } else if range.end == range.start {
4183 let anchor = snapshot.anchor_after(range.start);
4184 edits.push((anchor.clone()..anchor, new_text));
4185 } else {
4186 let edit_start = snapshot.anchor_after(range.start);
4187 let edit_end = snapshot.anchor_before(range.end);
4188 edits.push((edit_start..edit_end, new_text));
4189 }
4190 }
4191
4192 Ok(edits)
4193 })
4194 }
4195
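    // Returns the snapshot that was current at the given LSP document version,
    // pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
    // it. Without a version, the buffer's current text snapshot is returned.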
4196 fn buffer_snapshot_for_lsp_version(
4197 &mut self,
4198 buffer: &ModelHandle<Buffer>,
4199 version: Option<i32>,
4200 cx: &AppContext,
4201 ) -> Result<TextBufferSnapshot> {
4202 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4203
4204 if let Some(version) = version {
4205 let buffer_id = buffer.read(cx).remote_id();
4206 let snapshots = self
4207 .buffer_snapshots
4208 .get_mut(&buffer_id)
4209 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4210 let mut found_snapshot = None;
4211 snapshots.retain(|(snapshot_version, snapshot)| {
4212 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4213 false
4214 } else {
4215 if *snapshot_version == version {
4216 found_snapshot = Some(snapshot.clone());
4217 }
4218 true
4219 }
4220 });
4221
4222 found_snapshot.ok_or_else(|| {
4223 anyhow!(
4224 "snapshot not found for buffer {} at version {}",
4225 buffer_id,
4226 version
4227 )
4228 })
4229 } else {
            Ok(buffer.read(cx).text_snapshot())
4231 }
4232 }
4233
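    // Looks up the language server started for this buffer's worktree and
    // language, if any.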
4234 fn language_server_for_buffer(
4235 &self,
4236 buffer: &Buffer,
4237 cx: &AppContext,
4238 ) -> Option<&Arc<LanguageServer>> {
4239 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4240 let worktree_id = file.worktree_id(cx);
4241 self.language_servers.get(&(worktree_id, language.name()))
4242 } else {
4243 None
4244 }
4245 }
4246}
4247
4248impl WorktreeHandle {
4249 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4250 match self {
4251 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4252 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4253 }
4254 }
4255}
4256
4257impl OpenBuffer {
4258 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4259 match self {
4260 OpenBuffer::Strong(handle) => Some(handle.clone()),
4261 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4262 OpenBuffer::Loading(_) => None,
4263 }
4264 }
4265}
4266
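// Adapts a worktree snapshot into a candidate set for the fuzzy path matcher.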
4267struct CandidateSet {
4268 snapshot: Snapshot,
4269 include_ignored: bool,
4270 include_root_name: bool,
4271}
4272
4273impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4274 type Candidates = CandidateSetIter<'a>;
4275
4276 fn id(&self) -> usize {
4277 self.snapshot.id().to_usize()
4278 }
4279
4280 fn len(&self) -> usize {
4281 if self.include_ignored {
4282 self.snapshot.file_count()
4283 } else {
4284 self.snapshot.visible_file_count()
4285 }
4286 }
4287
4288 fn prefix(&self) -> Arc<str> {
4289 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4290 self.snapshot.root_name().into()
4291 } else if self.include_root_name {
4292 format!("{}/", self.snapshot.root_name()).into()
4293 } else {
4294 "".into()
4295 }
4296 }
4297
4298 fn candidates(&'a self, start: usize) -> Self::Candidates {
4299 CandidateSetIter {
4300 traversal: self.snapshot.files(self.include_ignored, start),
4301 }
4302 }
4303}
4304
4305struct CandidateSetIter<'a> {
4306 traversal: Traversal<'a>,
4307}
4308
4309impl<'a> Iterator for CandidateSetIter<'a> {
4310 type Item = PathMatchCandidate<'a>;
4311
4312 fn next(&mut self) -> Option<Self::Item> {
4313 self.traversal.next().map(|entry| {
4314 if let EntryKind::File(char_bag) = entry.kind {
4315 PathMatchCandidate {
4316 path: &entry.path,
4317 char_bag,
4318 }
4319 } else {
4320 unreachable!()
4321 }
4322 })
4323 }
4324}
4325
4326impl Entity for Project {
4327 type Event = Event;
4328
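    // When the project model is released, tell the server to unregister a
    // local project or to leave a remote one.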
4329 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4330 match &self.client_state {
4331 ProjectClientState::Local { remote_id_rx, .. } => {
4332 if let Some(project_id) = *remote_id_rx.borrow() {
4333 self.client
4334 .send(proto::UnregisterProject { project_id })
4335 .log_err();
4336 }
4337 }
4338 ProjectClientState::Remote { remote_id, .. } => {
4339 self.client
4340 .send(proto::LeaveProject {
4341 project_id: *remote_id,
4342 })
4343 .log_err();
4344 }
4345 }
4346 }
4347
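    // Shut down all running language servers before the application exits.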
4348 fn app_will_quit(
4349 &mut self,
4350 _: &mut MutableAppContext,
4351 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4352 let shutdown_futures = self
4353 .language_servers
4354 .drain()
4355 .filter_map(|(_, server)| server.shutdown())
4356 .collect::<Vec<_>>();
4357 Some(
4358 async move {
4359 futures::future::join_all(shutdown_futures).await;
4360 }
4361 .boxed(),
4362 )
4363 }
4364}
4365
4366impl Collaborator {
4367 fn from_proto(
4368 message: proto::Collaborator,
4369 user_store: &ModelHandle<UserStore>,
4370 cx: &mut AsyncAppContext,
4371 ) -> impl Future<Output = Result<Self>> {
4372 let user = user_store.update(cx, |user_store, cx| {
4373 user_store.fetch_user(message.user_id, cx)
4374 });
4375
4376 async move {
4377 Ok(Self {
4378 peer_id: PeerId(message.peer_id),
4379 user: user.await?,
4380 replica_id: message.replica_id as ReplicaId,
4381 })
4382 }
4383 }
4384}
4385
4386impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4387 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4388 Self {
4389 worktree_id,
4390 path: path.as_ref().into(),
4391 }
4392 }
4393}
4394
4395impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4396 fn from(options: lsp::CreateFileOptions) -> Self {
4397 Self {
4398 overwrite: options.overwrite.unwrap_or(false),
4399 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4400 }
4401 }
4402}
4403
4404impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4405 fn from(options: lsp::RenameFileOptions) -> Self {
4406 Self {
4407 overwrite: options.overwrite.unwrap_or(false),
4408 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4409 }
4410 }
4411}
4412
4413impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4414 fn from(options: lsp::DeleteFileOptions) -> Self {
4415 Self {
4416 recursive: options.recursive.unwrap_or(false),
4417 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4418 }
4419 }
4420}
4421
4422fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4423 proto::Symbol {
4424 source_worktree_id: symbol.source_worktree_id.to_proto(),
4425 worktree_id: symbol.worktree_id.to_proto(),
4426 language_name: symbol.language_name.clone(),
4427 name: symbol.name.clone(),
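        // Inverse of the transmute in `deserialize_symbol`: the symbol kind is
        // sent as its raw integer value.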
4428 kind: unsafe { mem::transmute(symbol.kind) },
4429 path: symbol.path.to_string_lossy().to_string(),
4430 start: Some(proto::Point {
4431 row: symbol.range.start.row,
4432 column: symbol.range.start.column,
4433 }),
4434 end: Some(proto::Point {
4435 row: symbol.range.end.row,
4436 column: symbol.range.end.column,
4437 }),
4438 signature: symbol.signature.to_vec(),
4439 }
4440}
4441
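// Computes the path of `path` relative to `base`, inserting `..` components
// where the two diverge.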
4442fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4443 let mut path_components = path.components();
4444 let mut base_components = base.components();
4445 let mut components: Vec<Component> = Vec::new();
4446 loop {
4447 match (path_components.next(), base_components.next()) {
4448 (None, None) => break,
4449 (Some(a), None) => {
4450 components.push(a);
4451 components.extend(path_components.by_ref());
4452 break;
4453 }
4454 (None, _) => components.push(Component::ParentDir),
4455 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4456 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4457 (Some(a), Some(_)) => {
4458 components.push(Component::ParentDir);
4459 for _ in base_components {
4460 components.push(Component::ParentDir);
4461 }
4462 components.push(a);
4463 components.extend(path_components.by_ref());
4464 break;
4465 }
4466 }
4467 }
4468 components.iter().map(|c| c.as_os_str()).collect()
4469}
4470
4471#[cfg(test)]
4472mod tests {
4473 use super::{Event, *};
4474 use fs::RealFs;
4475 use futures::StreamExt;
4476 use gpui::test::subscribe;
4477 use language::{
4478 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4479 ToPoint,
4480 };
4481 use lsp::Url;
4482 use serde_json::json;
4483 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4484 use unindent::Unindent as _;
4485 use util::test::temp_tree;
4486 use worktree::WorktreeHandle as _;
4487
4488 #[gpui::test]
4489 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4490 let dir = temp_tree(json!({
4491 "root": {
4492 "apple": "",
4493 "banana": {
4494 "carrot": {
4495 "date": "",
4496 "endive": "",
4497 }
4498 },
4499 "fennel": {
4500 "grape": "",
4501 }
4502 }
4503 }));
4504
4505 let root_link_path = dir.path().join("root_link");
4506 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4507 unix::fs::symlink(
4508 &dir.path().join("root/fennel"),
4509 &dir.path().join("root/finnochio"),
4510 )
4511 .unwrap();
4512
4513 let project = Project::test(Arc::new(RealFs), cx);
4514
4515 let (tree, _) = project
4516 .update(cx, |project, cx| {
4517 project.find_or_create_local_worktree(&root_link_path, true, cx)
4518 })
4519 .await
4520 .unwrap();
4521
4522 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4523 .await;
4524 cx.read(|cx| {
4525 let tree = tree.read(cx);
4526 assert_eq!(tree.file_count(), 5);
4527 assert_eq!(
4528 tree.inode_for_path("fennel/grape"),
4529 tree.inode_for_path("finnochio/grape")
4530 );
4531 });
4532
4533 let cancel_flag = Default::default();
4534 let results = project
4535 .read_with(cx, |project, cx| {
4536 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4537 })
4538 .await;
4539 assert_eq!(
4540 results
4541 .into_iter()
4542 .map(|result| result.path)
4543 .collect::<Vec<Arc<Path>>>(),
4544 vec![
4545 PathBuf::from("banana/carrot/date").into(),
4546 PathBuf::from("banana/carrot/endive").into(),
4547 ]
4548 );
4549 }
4550
4551 #[gpui::test]
4552 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4553 cx.foreground().forbid_parking();
4554
4555 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4556 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4557 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4558 completion_provider: Some(lsp::CompletionOptions {
4559 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4560 ..Default::default()
4561 }),
4562 ..Default::default()
4563 });
4564 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4565 completion_provider: Some(lsp::CompletionOptions {
4566 trigger_characters: Some(vec![":".to_string()]),
4567 ..Default::default()
4568 }),
4569 ..Default::default()
4570 });
4571
4572 let rust_language = Arc::new(Language::new(
4573 LanguageConfig {
4574 name: "Rust".into(),
4575 path_suffixes: vec!["rs".to_string()],
4576 language_server: Some(rust_lsp_config),
4577 ..Default::default()
4578 },
4579 Some(tree_sitter_rust::language()),
4580 ));
4581 let json_language = Arc::new(Language::new(
4582 LanguageConfig {
4583 name: "JSON".into(),
4584 path_suffixes: vec!["json".to_string()],
4585 language_server: Some(json_lsp_config),
4586 ..Default::default()
4587 },
4588 None,
4589 ));
4590
4591 let fs = FakeFs::new(cx.background());
4592 fs.insert_tree(
4593 "/the-root",
4594 json!({
4595 "test.rs": "const A: i32 = 1;",
4596 "test2.rs": "",
4597 "Cargo.toml": "a = 1",
4598 "package.json": "{\"a\": 1}",
4599 }),
4600 )
4601 .await;
4602
4603 let project = Project::test(fs, cx);
4604 project.update(cx, |project, _| {
4605 project.languages.add(rust_language);
4606 project.languages.add(json_language);
4607 });
4608
4609 let worktree_id = project
4610 .update(cx, |project, cx| {
4611 project.find_or_create_local_worktree("/the-root", true, cx)
4612 })
4613 .await
4614 .unwrap()
4615 .0
4616 .read_with(cx, |tree, _| tree.id());
4617
4618 // Open a buffer without an associated language server.
4619 let toml_buffer = project
4620 .update(cx, |project, cx| {
4621 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4622 })
4623 .await
4624 .unwrap();
4625
4626 // Open a buffer with an associated language server.
4627 let rust_buffer = project
4628 .update(cx, |project, cx| {
4629 project.open_buffer((worktree_id, "test.rs"), cx)
4630 })
4631 .await
4632 .unwrap();
4633
4634 // A server is started up, and it is notified about Rust files.
4635 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4636 assert_eq!(
4637 fake_rust_server
4638 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4639 .await
4640 .text_document,
4641 lsp::TextDocumentItem {
4642 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4643 version: 0,
4644 text: "const A: i32 = 1;".to_string(),
4645 language_id: Default::default()
4646 }
4647 );
4648
4649 // The buffer is configured based on the language server's capabilities.
4650 rust_buffer.read_with(cx, |buffer, _| {
4651 assert_eq!(
4652 buffer.completion_triggers(),
4653 &[".".to_string(), "::".to_string()]
4654 );
4655 });
4656 toml_buffer.read_with(cx, |buffer, _| {
4657 assert!(buffer.completion_triggers().is_empty());
4658 });
4659
4660 // Edit a buffer. The changes are reported to the language server.
4661 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4662 assert_eq!(
4663 fake_rust_server
4664 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4665 .await
4666 .text_document,
4667 lsp::VersionedTextDocumentIdentifier::new(
4668 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4669 1
4670 )
4671 );
4672
4673 // Open a third buffer with a different associated language server.
4674 let json_buffer = project
4675 .update(cx, |project, cx| {
4676 project.open_buffer((worktree_id, "package.json"), cx)
4677 })
4678 .await
4679 .unwrap();
4680
        // Another language server is started up, and it is notified about the
        // newly-opened JSON buffer.
4683 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4684 assert_eq!(
4685 fake_json_server
4686 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4687 .await
4688 .text_document,
4689 lsp::TextDocumentItem {
4690 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4691 version: 0,
4692 text: "{\"a\": 1}".to_string(),
4693 language_id: Default::default()
4694 }
4695 );
4696
4697 // This buffer is configured based on the second language server's
4698 // capabilities.
4699 json_buffer.read_with(cx, |buffer, _| {
4700 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4701 });
4702
        // When another buffer is opened whose language server is already running,
        // that buffer is also configured based on the existing server's capabilities.
4705 let rust_buffer2 = project
4706 .update(cx, |project, cx| {
4707 project.open_buffer((worktree_id, "test2.rs"), cx)
4708 })
4709 .await
4710 .unwrap();
4711 rust_buffer2.read_with(cx, |buffer, _| {
4712 assert_eq!(
4713 buffer.completion_triggers(),
4714 &[".".to_string(), "::".to_string()]
4715 );
4716 });
4717
4718 // Changes are reported only to servers matching the buffer's language.
4719 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4720 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4721 assert_eq!(
4722 fake_rust_server
4723 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4724 .await
4725 .text_document,
4726 lsp::VersionedTextDocumentIdentifier::new(
4727 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4728 1
4729 )
4730 );
4731
4732 // Save notifications are reported to all servers.
4733 toml_buffer
4734 .update(cx, |buffer, cx| buffer.save(cx))
4735 .await
4736 .unwrap();
4737 assert_eq!(
4738 fake_rust_server
4739 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4740 .await
4741 .text_document,
4742 lsp::TextDocumentIdentifier::new(
4743 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4744 )
4745 );
4746 assert_eq!(
4747 fake_json_server
4748 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4749 .await
4750 .text_document,
4751 lsp::TextDocumentIdentifier::new(
4752 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4753 )
4754 );
4755
4756 // Close notifications are reported only to servers matching the buffer's language.
4757 cx.update(|_| drop(json_buffer));
4758 let close_message = lsp::DidCloseTextDocumentParams {
4759 text_document: lsp::TextDocumentIdentifier::new(
4760 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4761 ),
4762 };
4763 assert_eq!(
4764 fake_json_server
4765 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4766 .await,
4767 close_message,
4768 );
4769 }
4770
4771 #[gpui::test]
4772 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4773 cx.foreground().forbid_parking();
4774
4775 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4776 let progress_token = language_server_config
4777 .disk_based_diagnostics_progress_token
4778 .clone()
4779 .unwrap();
4780
4781 let language = Arc::new(Language::new(
4782 LanguageConfig {
4783 name: "Rust".into(),
4784 path_suffixes: vec!["rs".to_string()],
4785 language_server: Some(language_server_config),
4786 ..Default::default()
4787 },
4788 Some(tree_sitter_rust::language()),
4789 ));
4790
4791 let fs = FakeFs::new(cx.background());
4792 fs.insert_tree(
4793 "/dir",
4794 json!({
4795 "a.rs": "fn a() { A }",
4796 "b.rs": "const y: i32 = 1",
4797 }),
4798 )
4799 .await;
4800
4801 let project = Project::test(fs, cx);
4802 project.update(cx, |project, _| project.languages.add(language));
4803
4804 let (tree, _) = project
4805 .update(cx, |project, cx| {
4806 project.find_or_create_local_worktree("/dir", true, cx)
4807 })
4808 .await
4809 .unwrap();
4810 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4811
4812 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4813 .await;
4814
        // Cause the worktree to start the fake language server
4816 let _buffer = project
4817 .update(cx, |project, cx| {
4818 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4819 })
4820 .await
4821 .unwrap();
4822
4823 let mut events = subscribe(&project, cx);
4824
4825 let mut fake_server = fake_servers.next().await.unwrap();
4826 fake_server.start_progress(&progress_token).await;
4827 assert_eq!(
4828 events.next().await.unwrap(),
4829 Event::DiskBasedDiagnosticsStarted
4830 );
4831
4832 fake_server.start_progress(&progress_token).await;
4833 fake_server.end_progress(&progress_token).await;
4834 fake_server.start_progress(&progress_token).await;
4835
4836 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4837 lsp::PublishDiagnosticsParams {
4838 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4839 version: None,
4840 diagnostics: vec![lsp::Diagnostic {
4841 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4842 severity: Some(lsp::DiagnosticSeverity::ERROR),
4843 message: "undefined variable 'A'".to_string(),
4844 ..Default::default()
4845 }],
4846 },
4847 );
4848 assert_eq!(
4849 events.next().await.unwrap(),
4850 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4851 );
4852
4853 fake_server.end_progress(&progress_token).await;
4854 fake_server.end_progress(&progress_token).await;
4855 assert_eq!(
4856 events.next().await.unwrap(),
4857 Event::DiskBasedDiagnosticsUpdated
4858 );
4859 assert_eq!(
4860 events.next().await.unwrap(),
4861 Event::DiskBasedDiagnosticsFinished
4862 );
4863
4864 let buffer = project
4865 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4866 .await
4867 .unwrap();
4868
4869 buffer.read_with(cx, |buffer, _| {
4870 let snapshot = buffer.snapshot();
4871 let diagnostics = snapshot
4872 .diagnostics_in_range::<_, Point>(0..buffer.len())
4873 .collect::<Vec<_>>();
4874 assert_eq!(
4875 diagnostics,
4876 &[DiagnosticEntry {
4877 range: Point::new(0, 9)..Point::new(0, 10),
4878 diagnostic: Diagnostic {
4879 severity: lsp::DiagnosticSeverity::ERROR,
4880 message: "undefined variable 'A'".to_string(),
4881 group_id: 0,
4882 is_primary: true,
4883 ..Default::default()
4884 }
4885 }]
4886 )
4887 });
4888 }
4889
4890 #[gpui::test]
4891 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4892 cx.foreground().forbid_parking();
4893
4894 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4895 lsp_config
4896 .disk_based_diagnostic_sources
4897 .insert("disk".to_string());
4898 let language = Arc::new(Language::new(
4899 LanguageConfig {
4900 name: "Rust".into(),
4901 path_suffixes: vec!["rs".to_string()],
4902 language_server: Some(lsp_config),
4903 ..Default::default()
4904 },
4905 Some(tree_sitter_rust::language()),
4906 ));
4907
4908 let text = "
4909 fn a() { A }
4910 fn b() { BB }
4911 fn c() { CCC }
4912 "
4913 .unindent();
4914
4915 let fs = FakeFs::new(cx.background());
4916 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4917
4918 let project = Project::test(fs, cx);
4919 project.update(cx, |project, _| project.languages.add(language));
4920
4921 let worktree_id = project
4922 .update(cx, |project, cx| {
4923 project.find_or_create_local_worktree("/dir", true, cx)
4924 })
4925 .await
4926 .unwrap()
4927 .0
4928 .read_with(cx, |tree, _| tree.id());
4929
4930 let buffer = project
4931 .update(cx, |project, cx| {
4932 project.open_buffer((worktree_id, "a.rs"), cx)
4933 })
4934 .await
4935 .unwrap();
4936
4937 let mut fake_server = fake_servers.next().await.unwrap();
4938 let open_notification = fake_server
4939 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4940 .await;
4941
4942 // Edit the buffer, moving the content down
4943 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4944 let change_notification_1 = fake_server
4945 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4946 .await;
4947 assert!(
4948 change_notification_1.text_document.version > open_notification.text_document.version
4949 );
4950
4951 // Report some diagnostics for the initial version of the buffer
4952 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4953 lsp::PublishDiagnosticsParams {
4954 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4955 version: Some(open_notification.text_document.version),
4956 diagnostics: vec![
4957 lsp::Diagnostic {
4958 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4959 severity: Some(DiagnosticSeverity::ERROR),
4960 message: "undefined variable 'A'".to_string(),
4961 source: Some("disk".to_string()),
4962 ..Default::default()
4963 },
4964 lsp::Diagnostic {
4965 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4966 severity: Some(DiagnosticSeverity::ERROR),
4967 message: "undefined variable 'BB'".to_string(),
4968 source: Some("disk".to_string()),
4969 ..Default::default()
4970 },
4971 lsp::Diagnostic {
4972 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4973 severity: Some(DiagnosticSeverity::ERROR),
4974 source: Some("disk".to_string()),
4975 message: "undefined variable 'CCC'".to_string(),
4976 ..Default::default()
4977 },
4978 ],
4979 },
4980 );
4981
4982 // The diagnostics have moved down since they were created.
4983 buffer.next_notification(cx).await;
4984 buffer.read_with(cx, |buffer, _| {
4985 assert_eq!(
4986 buffer
4987 .snapshot()
4988 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
4989 .collect::<Vec<_>>(),
4990 &[
4991 DiagnosticEntry {
4992 range: Point::new(3, 9)..Point::new(3, 11),
4993 diagnostic: Diagnostic {
4994 severity: DiagnosticSeverity::ERROR,
4995 message: "undefined variable 'BB'".to_string(),
4996 is_disk_based: true,
4997 group_id: 1,
4998 is_primary: true,
4999 ..Default::default()
5000 },
5001 },
5002 DiagnosticEntry {
5003 range: Point::new(4, 9)..Point::new(4, 12),
5004 diagnostic: Diagnostic {
5005 severity: DiagnosticSeverity::ERROR,
5006 message: "undefined variable 'CCC'".to_string(),
5007 is_disk_based: true,
5008 group_id: 2,
5009 is_primary: true,
5010 ..Default::default()
5011 }
5012 }
5013 ]
5014 );
5015 assert_eq!(
5016 chunks_with_diagnostics(buffer, 0..buffer.len()),
5017 [
5018 ("\n\nfn a() { ".to_string(), None),
5019 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5020 (" }\nfn b() { ".to_string(), None),
5021 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5022 (" }\nfn c() { ".to_string(), None),
5023 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5024 (" }\n".to_string(), None),
5025 ]
5026 );
5027 assert_eq!(
5028 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5029 [
5030 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5031 (" }\nfn c() { ".to_string(), None),
5032 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5033 ]
5034 );
5035 });
5036
5037 // Ensure overlapping diagnostics are highlighted correctly.
5038 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5039 lsp::PublishDiagnosticsParams {
5040 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5041 version: Some(open_notification.text_document.version),
5042 diagnostics: vec![
5043 lsp::Diagnostic {
5044 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5045 severity: Some(DiagnosticSeverity::ERROR),
5046 message: "undefined variable 'A'".to_string(),
5047 source: Some("disk".to_string()),
5048 ..Default::default()
5049 },
5050 lsp::Diagnostic {
5051 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5052 severity: Some(DiagnosticSeverity::WARNING),
5053 message: "unreachable statement".to_string(),
5054 source: Some("disk".to_string()),
5055 ..Default::default()
5056 },
5057 ],
5058 },
5059 );
5060
5061 buffer.next_notification(cx).await;
5062 buffer.read_with(cx, |buffer, _| {
5063 assert_eq!(
5064 buffer
5065 .snapshot()
5066 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
5067 .collect::<Vec<_>>(),
5068 &[
5069 DiagnosticEntry {
5070 range: Point::new(2, 9)..Point::new(2, 12),
5071 diagnostic: Diagnostic {
5072 severity: DiagnosticSeverity::WARNING,
5073 message: "unreachable statement".to_string(),
5074 is_disk_based: true,
5075 group_id: 1,
5076 is_primary: true,
5077 ..Default::default()
5078 }
5079 },
5080 DiagnosticEntry {
5081 range: Point::new(2, 9)..Point::new(2, 10),
5082 diagnostic: Diagnostic {
5083 severity: DiagnosticSeverity::ERROR,
5084 message: "undefined variable 'A'".to_string(),
5085 is_disk_based: true,
5086 group_id: 0,
5087 is_primary: true,
5088 ..Default::default()
5089 },
5090 }
5091 ]
5092 );
5093 assert_eq!(
5094 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5095 [
5096 ("fn a() { ".to_string(), None),
5097 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5098 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5099 ("\n".to_string(), None),
5100 ]
5101 );
5102 assert_eq!(
5103 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5104 [
5105 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5106 ("\n".to_string(), None),
5107 ]
5108 );
5109 });
5110
5111 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5112 // changes since the last save.
5113 buffer.update(cx, |buffer, cx| {
5114 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5115 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5116 });
5117 let change_notification_2 =
5118 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5119 assert!(
5120 change_notification_2.await.text_document.version
5121 > change_notification_1.text_document.version
5122 );
5123
5124 // Handle out-of-order diagnostics
5125 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5126 lsp::PublishDiagnosticsParams {
5127 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5128 version: Some(open_notification.text_document.version),
5129 diagnostics: vec![
5130 lsp::Diagnostic {
5131 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5132 severity: Some(DiagnosticSeverity::ERROR),
5133 message: "undefined variable 'BB'".to_string(),
5134 source: Some("disk".to_string()),
5135 ..Default::default()
5136 },
5137 lsp::Diagnostic {
5138 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5139 severity: Some(DiagnosticSeverity::WARNING),
5140 message: "undefined variable 'A'".to_string(),
5141 source: Some("disk".to_string()),
5142 ..Default::default()
5143 },
5144 ],
5145 },
5146 );
5147
5148 buffer.next_notification(cx).await;
5149 buffer.read_with(cx, |buffer, _| {
5150 assert_eq!(
5151 buffer
5152 .snapshot()
5153 .diagnostics_in_range::<_, Point>(0..buffer.len())
5154 .collect::<Vec<_>>(),
5155 &[
5156 DiagnosticEntry {
5157 range: Point::new(2, 21)..Point::new(2, 22),
5158 diagnostic: Diagnostic {
5159 severity: DiagnosticSeverity::WARNING,
5160 message: "undefined variable 'A'".to_string(),
5161 is_disk_based: true,
5162 group_id: 1,
5163 is_primary: true,
5164 ..Default::default()
5165 }
5166 },
5167 DiagnosticEntry {
5168 range: Point::new(3, 9)..Point::new(3, 11),
5169 diagnostic: Diagnostic {
5170 severity: DiagnosticSeverity::ERROR,
5171 message: "undefined variable 'BB'".to_string(),
5172 is_disk_based: true,
5173 group_id: 0,
5174 is_primary: true,
5175 ..Default::default()
5176 },
5177 }
5178 ]
5179 );
5180 });
5181 }
5182
5183 #[gpui::test]
5184 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5185 cx.foreground().forbid_parking();
5186
5187 let text = concat!(
5188 "let one = ;\n", //
5189 "let two = \n",
5190 "let three = 3;\n",
5191 );
5192
5193 let fs = FakeFs::new(cx.background());
5194 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5195
5196 let project = Project::test(fs, cx);
5197 let worktree_id = project
5198 .update(cx, |project, cx| {
5199 project.find_or_create_local_worktree("/dir", true, cx)
5200 })
5201 .await
5202 .unwrap()
5203 .0
5204 .read_with(cx, |tree, _| tree.id());
5205
5206 let buffer = project
5207 .update(cx, |project, cx| {
5208 project.open_buffer((worktree_id, "a.rs"), cx)
5209 })
5210 .await
5211 .unwrap();
5212
5213 project.update(cx, |project, cx| {
5214 project
5215 .update_buffer_diagnostics(
5216 &buffer,
5217 vec![
5218 DiagnosticEntry {
5219 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5220 diagnostic: Diagnostic {
5221 severity: DiagnosticSeverity::ERROR,
5222 message: "syntax error 1".to_string(),
5223 ..Default::default()
5224 },
5225 },
5226 DiagnosticEntry {
5227 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5228 diagnostic: Diagnostic {
5229 severity: DiagnosticSeverity::ERROR,
5230 message: "syntax error 2".to_string(),
5231 ..Default::default()
5232 },
5233 },
5234 ],
5235 None,
5236 cx,
5237 )
5238 .unwrap();
5239 });
5240
5241 // An empty range is extended forward to include the following character.
5242 // At the end of a line, an empty range is extended backward to include
5243 // the preceding character.
5244 buffer.read_with(cx, |buffer, _| {
5245 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5246 assert_eq!(
5247 chunks
5248 .iter()
5249 .map(|(s, d)| (s.as_str(), *d))
5250 .collect::<Vec<_>>(),
5251 &[
5252 ("let one = ", None),
5253 (";", Some(DiagnosticSeverity::ERROR)),
5254 ("\nlet two =", None),
5255 (" ", Some(DiagnosticSeverity::ERROR)),
5256 ("\nlet three = 3;\n", None)
5257 ]
5258 );
5259 });
5260 }
5261
5262 #[gpui::test]
5263 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5264 cx.foreground().forbid_parking();
5265
5266 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5267 let language = Arc::new(Language::new(
5268 LanguageConfig {
5269 name: "Rust".into(),
5270 path_suffixes: vec!["rs".to_string()],
5271 language_server: Some(lsp_config),
5272 ..Default::default()
5273 },
5274 Some(tree_sitter_rust::language()),
5275 ));
5276
5277 let text = "
5278 fn a() {
5279 f1();
5280 }
5281 fn b() {
5282 f2();
5283 }
5284 fn c() {
5285 f3();
5286 }
5287 "
5288 .unindent();
5289
5290 let fs = FakeFs::new(cx.background());
5291 fs.insert_tree(
5292 "/dir",
5293 json!({
5294 "a.rs": text.clone(),
5295 }),
5296 )
5297 .await;
5298
5299 let project = Project::test(fs, cx);
5300 project.update(cx, |project, _| project.languages.add(language));
5301
5302 let worktree_id = project
5303 .update(cx, |project, cx| {
5304 project.find_or_create_local_worktree("/dir", true, cx)
5305 })
5306 .await
5307 .unwrap()
5308 .0
5309 .read_with(cx, |tree, _| tree.id());
5310
5311 let buffer = project
5312 .update(cx, |project, cx| {
5313 project.open_buffer((worktree_id, "a.rs"), cx)
5314 })
5315 .await
5316 .unwrap();
5317
5318 let mut fake_server = fake_servers.next().await.unwrap();
5319 let lsp_document_version = fake_server
5320 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5321 .await
5322 .text_document
5323 .version;
5324
5325 // Simulate editing the buffer after the language server computes some edits.
5326 buffer.update(cx, |buffer, cx| {
5327 buffer.edit(
5328 [Point::new(0, 0)..Point::new(0, 0)],
5329 "// above first function\n",
5330 cx,
5331 );
5332 buffer.edit(
5333 [Point::new(2, 0)..Point::new(2, 0)],
5334 " // inside first function\n",
5335 cx,
5336 );
5337 buffer.edit(
5338 [Point::new(6, 4)..Point::new(6, 4)],
5339 "// inside second function ",
5340 cx,
5341 );
5342
5343 assert_eq!(
5344 buffer.text(),
5345 "
5346 // above first function
5347 fn a() {
5348 // inside first function
5349 f1();
5350 }
5351 fn b() {
5352 // inside second function f2();
5353 }
5354 fn c() {
5355 f3();
5356 }
5357 "
5358 .unindent()
5359 );
5360 });
5361
5362 let edits = project
5363 .update(cx, |project, cx| {
5364 project.edits_from_lsp(
5365 &buffer,
5366 vec![
5367 // replace body of first function
5368 lsp::TextEdit {
5369 range: lsp::Range::new(
5370 lsp::Position::new(0, 0),
5371 lsp::Position::new(3, 0),
5372 ),
5373 new_text: "
5374 fn a() {
5375 f10();
5376 }
5377 "
5378 .unindent(),
5379 },
5380 // edit inside second function
5381 lsp::TextEdit {
5382 range: lsp::Range::new(
5383 lsp::Position::new(4, 6),
5384 lsp::Position::new(4, 6),
5385 ),
5386 new_text: "00".into(),
5387 },
5388 // edit inside third function via two distinct edits
5389 lsp::TextEdit {
5390 range: lsp::Range::new(
5391 lsp::Position::new(7, 5),
5392 lsp::Position::new(7, 5),
5393 ),
5394 new_text: "4000".into(),
5395 },
5396 lsp::TextEdit {
5397 range: lsp::Range::new(
5398 lsp::Position::new(7, 5),
5399 lsp::Position::new(7, 6),
5400 ),
5401 new_text: "".into(),
5402 },
5403 ],
5404 Some(lsp_document_version),
5405 cx,
5406 )
5407 })
5408 .await
5409 .unwrap();
5410
5411 buffer.update(cx, |buffer, cx| {
5412 for (range, new_text) in edits {
5413 buffer.edit([range], new_text, cx);
5414 }
5415 assert_eq!(
5416 buffer.text(),
5417 "
5418 // above first function
5419 fn a() {
5420 // inside first function
5421 f10();
5422 }
5423 fn b() {
5424 // inside second function f200();
5425 }
5426 fn c() {
5427 f4000();
5428 }
5429 "
5430 .unindent()
5431 );
5432 });
5433 }
5434
5435 #[gpui::test]
5436 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5437 cx.foreground().forbid_parking();
5438
5439 let text = "
5440 use a::b;
5441 use a::c;
5442
5443 fn f() {
5444 b();
5445 c();
5446 }
5447 "
5448 .unindent();
5449
5450 let fs = FakeFs::new(cx.background());
5451 fs.insert_tree(
5452 "/dir",
5453 json!({
5454 "a.rs": text.clone(),
5455 }),
5456 )
5457 .await;
5458
5459 let project = Project::test(fs, cx);
5460 let worktree_id = project
5461 .update(cx, |project, cx| {
5462 project.find_or_create_local_worktree("/dir", true, cx)
5463 })
5464 .await
5465 .unwrap()
5466 .0
5467 .read_with(cx, |tree, _| tree.id());
5468
5469 let buffer = project
5470 .update(cx, |project, cx| {
5471 project.open_buffer((worktree_id, "a.rs"), cx)
5472 })
5473 .await
5474 .unwrap();
5475
5476 // Simulate the language server sending us a small edit in the form of a very large diff.
5477 // Rust-analyzer does this when performing a merge-imports code action.
5478 let edits = project
5479 .update(cx, |project, cx| {
5480 project.edits_from_lsp(
5481 &buffer,
5482 [
5483 // Replace the first use statement without editing the semicolon.
5484 lsp::TextEdit {
5485 range: lsp::Range::new(
5486 lsp::Position::new(0, 4),
5487 lsp::Position::new(0, 8),
5488 ),
5489 new_text: "a::{b, c}".into(),
5490 },
5491 // Reinsert the remainder of the file between the semicolon and the final
5492 // newline of the file.
5493 lsp::TextEdit {
5494 range: lsp::Range::new(
5495 lsp::Position::new(0, 9),
5496 lsp::Position::new(0, 9),
5497 ),
5498 new_text: "\n\n".into(),
5499 },
5500 lsp::TextEdit {
5501 range: lsp::Range::new(
5502 lsp::Position::new(0, 9),
5503 lsp::Position::new(0, 9),
5504 ),
5505 new_text: "
5506 fn f() {
5507 b();
5508 c();
5509 }"
5510 .unindent(),
5511 },
5512 // Delete everything after the first newline of the file.
5513 lsp::TextEdit {
5514 range: lsp::Range::new(
5515 lsp::Position::new(1, 0),
5516 lsp::Position::new(7, 0),
5517 ),
5518 new_text: "".into(),
5519 },
5520 ],
5521 None,
5522 cx,
5523 )
5524 })
5525 .await
5526 .unwrap();
5527
5528 buffer.update(cx, |buffer, cx| {
5529 let edits = edits
5530 .into_iter()
5531 .map(|(range, text)| {
5532 (
5533 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5534 text,
5535 )
5536 })
5537 .collect::<Vec<_>>();
5538
5539 assert_eq!(
5540 edits,
5541 [
5542 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5543 (Point::new(1, 0)..Point::new(2, 0), "".into())
5544 ]
5545 );
5546
5547 for (range, new_text) in edits {
5548 buffer.edit([range], new_text, cx);
5549 }
5550 assert_eq!(
5551 buffer.text(),
5552 "
5553 use a::{b, c};
5554
5555 fn f() {
5556 b();
5557 c();
5558 }
5559 "
5560 .unindent()
5561 );
5562 });
5563 }
5564
5565 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5566 buffer: &Buffer,
5567 range: Range<T>,
5568 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5569 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5570 for chunk in buffer.snapshot().chunks(range, true) {
5571 if chunks
5572 .last()
5573 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5574 {
5575 chunks.last_mut().unwrap().0.push_str(chunk.text);
5576 } else {
5577 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5578 }
5579 }
5580 chunks
5581 }
5582
5583 #[gpui::test]
5584 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5585 let dir = temp_tree(json!({
5586 "root": {
5587 "dir1": {},
5588 "dir2": {
5589 "dir3": {}
5590 }
5591 }
5592 }));
5593
5594 let project = Project::test(Arc::new(RealFs), cx);
5595 let (tree, _) = project
5596 .update(cx, |project, cx| {
5597 project.find_or_create_local_worktree(&dir.path(), true, cx)
5598 })
5599 .await
5600 .unwrap();
5601
5602 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5603 .await;
5604
5605 let cancel_flag = Default::default();
5606 let results = project
5607 .read_with(cx, |project, cx| {
5608 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5609 })
5610 .await;
5611
5612 assert!(results.is_empty());
5613 }
5614
5615 #[gpui::test]
5616 async fn test_definition(cx: &mut gpui::TestAppContext) {
5617 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5618 let language = Arc::new(Language::new(
5619 LanguageConfig {
5620 name: "Rust".into(),
5621 path_suffixes: vec!["rs".to_string()],
5622 language_server: Some(language_server_config),
5623 ..Default::default()
5624 },
5625 Some(tree_sitter_rust::language()),
5626 ));
5627
5628 let fs = FakeFs::new(cx.background());
5629 fs.insert_tree(
5630 "/dir",
5631 json!({
5632 "a.rs": "const fn a() { A }",
5633 "b.rs": "const y: i32 = crate::a()",
5634 }),
5635 )
5636 .await;
5637
5638 let project = Project::test(fs, cx);
5639 project.update(cx, |project, _| {
5640 Arc::get_mut(&mut project.languages).unwrap().add(language);
5641 });
5642
5643 let (tree, _) = project
5644 .update(cx, |project, cx| {
5645 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5646 })
5647 .await
5648 .unwrap();
5649 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5650 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5651 .await;
5652
5653 let buffer = project
5654 .update(cx, |project, cx| {
5655 project.open_buffer(
5656 ProjectPath {
5657 worktree_id,
5658 path: Path::new("").into(),
5659 },
5660 cx,
5661 )
5662 })
5663 .await
5664 .unwrap();
5665
5666 let mut fake_server = fake_servers.next().await.unwrap();
5667 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5668 let params = params.text_document_position_params;
5669 assert_eq!(
5670 params.text_document.uri.to_file_path().unwrap(),
5671 Path::new("/dir/b.rs"),
5672 );
5673 assert_eq!(params.position, lsp::Position::new(0, 22));
5674
5675 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5676 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5677 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5678 )))
5679 });
5680
5681 let mut definitions = project
5682 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5683 .await
5684 .unwrap();
5685
5686 assert_eq!(definitions.len(), 1);
5687 let definition = definitions.pop().unwrap();
5688 cx.update(|cx| {
5689 let target_buffer = definition.buffer.read(cx);
5690 assert_eq!(
5691 target_buffer
5692 .file()
5693 .unwrap()
5694 .as_local()
5695 .unwrap()
5696 .abs_path(cx),
5697 Path::new("/dir/a.rs"),
5698 );
5699 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5700 assert_eq!(
5701 list_worktrees(&project, cx),
5702 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5703 );
5704
5705 drop(definition);
5706 });
5707 cx.read(|cx| {
5708 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5709 });
5710
5711 fn list_worktrees<'a>(
5712 project: &'a ModelHandle<Project>,
5713 cx: &'a AppContext,
5714 ) -> Vec<(&'a Path, bool)> {
5715 project
5716 .read(cx)
5717 .worktrees(cx)
5718 .map(|worktree| {
5719 let worktree = worktree.read(cx);
5720 (
5721 worktree.as_local().unwrap().abs_path().as_ref(),
5722 worktree.is_visible(),
5723 )
5724 })
5725 .collect::<Vec<_>>()
5726 }
5727 }
5728
5729 #[gpui::test]
5730 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5731 let fs = FakeFs::new(cx.background());
5732 fs.insert_tree(
5733 "/dir",
5734 json!({
5735 "file1": "the old contents",
5736 }),
5737 )
5738 .await;
5739
5740 let project = Project::test(fs.clone(), cx);
5741 let worktree_id = project
5742 .update(cx, |p, cx| {
5743 p.find_or_create_local_worktree("/dir", true, cx)
5744 })
5745 .await
5746 .unwrap()
5747 .0
5748 .read_with(cx, |tree, _| tree.id());
5749
5750 let buffer = project
5751 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5752 .await
5753 .unwrap();
5754 buffer
5755 .update(cx, |buffer, cx| {
5756 assert_eq!(buffer.text(), "the old contents");
5757 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5758 buffer.save(cx)
5759 })
5760 .await
5761 .unwrap();
5762
5763 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5764 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5765 }
5766
5767 #[gpui::test]
5768 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5769 let fs = FakeFs::new(cx.background());
5770 fs.insert_tree(
5771 "/dir",
5772 json!({
5773 "file1": "the old contents",
5774 }),
5775 )
5776 .await;
5777
5778 let project = Project::test(fs.clone(), cx);
5779 let worktree_id = project
5780 .update(cx, |p, cx| {
5781 p.find_or_create_local_worktree("/dir/file1", true, cx)
5782 })
5783 .await
5784 .unwrap()
5785 .0
5786 .read_with(cx, |tree, _| tree.id());
5787
5788 let buffer = project
5789 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5790 .await
5791 .unwrap();
5792 buffer
5793 .update(cx, |buffer, cx| {
5794 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5795 buffer.save(cx)
5796 })
5797 .await
5798 .unwrap();
5799
5800 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5801 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5802 }
5803
5804 #[gpui::test]
5805 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5806 let fs = FakeFs::new(cx.background());
5807 fs.insert_tree("/dir", json!({})).await;
5808
5809 let project = Project::test(fs.clone(), cx);
5810 let (worktree, _) = project
5811 .update(cx, |project, cx| {
5812 project.find_or_create_local_worktree("/dir", true, cx)
5813 })
5814 .await
5815 .unwrap();
5816 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5817
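        // Create an untitled buffer and edit it so that it becomes dirty.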
5818 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5819 buffer.update(cx, |buffer, cx| {
5820 buffer.edit([0..0], "abc", cx);
5821 assert!(buffer.is_dirty());
5822 assert!(!buffer.has_conflict());
5823 });
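        // Saving the buffer as `/dir/file1` writes it to disk and clears its dirty state.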
5824 project
5825 .update(cx, |project, cx| {
5826 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5827 })
5828 .await
5829 .unwrap();
5830 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5831 buffer.read_with(cx, |buffer, cx| {
5832 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5833 assert!(!buffer.is_dirty());
5834 assert!(!buffer.has_conflict());
5835 });
5836
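        // Opening the same path returns the buffer that was just saved rather
        // than creating a new one.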
5837 let opened_buffer = project
5838 .update(cx, |project, cx| {
5839 project.open_buffer((worktree_id, "file1"), cx)
5840 })
5841 .await
5842 .unwrap();
5843 assert_eq!(opened_buffer, buffer);
5844 }
5845
5846 #[gpui::test(retries = 5)]
5847 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5848 let dir = temp_tree(json!({
5849 "a": {
5850 "file1": "",
5851 "file2": "",
5852 "file3": "",
5853 },
5854 "b": {
5855 "c": {
5856 "file4": "",
5857 "file5": "",
5858 }
5859 }
5860 }));
5861
5862 let project = Project::test(Arc::new(RealFs), cx);
5863 let rpc = project.read_with(cx, |p, _| p.client.clone());
5864
5865 let (tree, _) = project
5866 .update(cx, |p, cx| {
5867 p.find_or_create_local_worktree(dir.path(), true, cx)
5868 })
5869 .await
5870 .unwrap();
5871 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5872
5873 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5874 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5875 async move { buffer.await.unwrap() }
5876 };
5877 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5878 tree.read_with(cx, |tree, _| {
5879 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5881 .id
5882 })
5883 };
5884
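        // Open buffers for several files so that their paths can be checked
        // after the files are renamed and deleted on disk.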
5885 let buffer2 = buffer_for_path("a/file2", cx).await;
5886 let buffer3 = buffer_for_path("a/file3", cx).await;
5887 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5888 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5889
5890 let file2_id = id_for_path("a/file2", &cx);
5891 let file3_id = id_for_path("a/file3", &cx);
5892 let file4_id = id_for_path("b/c/file4", &cx);
5893
5894 // Wait for the initial scan.
5895 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5896 .await;
5897
5898 // Create a remote copy of this worktree.
5899 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5900 let (remote, load_task) = cx.update(|cx| {
5901 Worktree::remote(
5902 1,
5903 1,
5904 initial_snapshot.to_proto(&Default::default(), true),
5905 rpc.clone(),
5906 cx,
5907 )
5908 });
5909 load_task.await;
5910
5911 cx.read(|cx| {
5912 assert!(!buffer2.read(cx).is_dirty());
5913 assert!(!buffer3.read(cx).is_dirty());
5914 assert!(!buffer4.read(cx).is_dirty());
5915 assert!(!buffer5.read(cx).is_dirty());
5916 });
5917
5918 // Rename and delete files and directories.
5919 tree.flush_fs_events(&cx).await;
5920 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5921 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5922 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5923 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5924 tree.flush_fs_events(&cx).await;
5925
5926 let expected_paths = vec![
5927 "a",
5928 "a/file1",
5929 "a/file2.new",
5930 "b",
5931 "d",
5932 "d/file3",
5933 "d/file4",
5934 ];
5935
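        // The worktree reflects the renames and deletions, entry ids survive the
        // renames, and the open buffers track their files' new paths.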
5936 cx.read(|app| {
5937 assert_eq!(
5938 tree.read(app)
5939 .paths()
5940 .map(|p| p.to_str().unwrap())
5941 .collect::<Vec<_>>(),
5942 expected_paths
5943 );
5944
5945 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5946 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5947 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5948
5949 assert_eq!(
5950 buffer2.read(app).file().unwrap().path().as_ref(),
5951 Path::new("a/file2.new")
5952 );
5953 assert_eq!(
5954 buffer3.read(app).file().unwrap().path().as_ref(),
5955 Path::new("d/file3")
5956 );
5957 assert_eq!(
5958 buffer4.read(app).file().unwrap().path().as_ref(),
5959 Path::new("d/file4")
5960 );
5961 assert_eq!(
5962 buffer5.read(app).file().unwrap().path().as_ref(),
5963 Path::new("b/c/file5")
5964 );
5965
5966 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5967 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5968 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5969 assert!(buffer5.read(app).file().unwrap().is_deleted());
5970 });
5971
5972 // Update the remote worktree. Check that it becomes consistent with the
5973 // local worktree.
5974 remote.update(cx, |remote, cx| {
5975 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5976 &initial_snapshot,
5977 1,
5978 1,
5979 true,
5980 );
5981 remote
5982 .as_remote_mut()
5983 .unwrap()
5984 .snapshot
5985 .apply_remote_update(update_message)
5986 .unwrap();
5987
5988 assert_eq!(
5989 remote
5990 .paths()
5991 .map(|p| p.to_str().unwrap())
5992 .collect::<Vec<_>>(),
5993 expected_paths
5994 );
5995 });
5996 }
5997
5998 #[gpui::test]
5999 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6000 let fs = FakeFs::new(cx.background());
6001 fs.insert_tree(
6002 "/the-dir",
6003 json!({
6004 "a.txt": "a-contents",
6005 "b.txt": "b-contents",
6006 }),
6007 )
6008 .await;
6009
6010 let project = Project::test(fs.clone(), cx);
6011 let worktree_id = project
6012 .update(cx, |p, cx| {
6013 p.find_or_create_local_worktree("/the-dir", true, cx)
6014 })
6015 .await
6016 .unwrap()
6017 .0
6018 .read_with(cx, |tree, _| tree.id());
6019
6020 // Spawn multiple tasks to open paths, repeating some paths.
6021 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6022 (
6023 p.open_buffer((worktree_id, "a.txt"), cx),
6024 p.open_buffer((worktree_id, "b.txt"), cx),
6025 p.open_buffer((worktree_id, "a.txt"), cx),
6026 )
6027 });
6028
6029 let buffer_a_1 = buffer_a_1.await.unwrap();
6030 let buffer_a_2 = buffer_a_2.await.unwrap();
6031 let buffer_b = buffer_b.await.unwrap();
6032 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6033 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6034
6035 // There is only one buffer per path.
6036 let buffer_a_id = buffer_a_1.id();
6037 assert_eq!(buffer_a_2.id(), buffer_a_id);
6038
6039 // Open the same path again while it is still open.
6040 drop(buffer_a_1);
6041 let buffer_a_3 = project
6042 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6043 .await
6044 .unwrap();
6045
6046 // There's still only one buffer per path.
6047 assert_eq!(buffer_a_3.id(), buffer_a_id);
6048 }
6049
6050 #[gpui::test]
6051 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6052 use std::fs;
6053
6054 let dir = temp_tree(json!({
6055 "file1": "abc",
6056 "file2": "def",
6057 "file3": "ghi",
6058 }));
6059
6060 let project = Project::test(Arc::new(RealFs), cx);
6061 let (worktree, _) = project
6062 .update(cx, |p, cx| {
6063 p.find_or_create_local_worktree(dir.path(), true, cx)
6064 })
6065 .await
6066 .unwrap();
6067 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6068
6069 worktree.flush_fs_events(&cx).await;
6070 worktree
6071 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6072 .await;
6073
6074 let buffer1 = project
6075 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6076 .await
6077 .unwrap();
6078 let events = Rc::new(RefCell::new(Vec::new()));
6079
        // Initially, the buffer isn't dirty.
6081 buffer1.update(cx, |buffer, cx| {
6082 cx.subscribe(&buffer1, {
6083 let events = events.clone();
6084 move |_, _, event, _| match event {
6085 BufferEvent::Operation(_) => {}
6086 _ => events.borrow_mut().push(event.clone()),
6087 }
6088 })
6089 .detach();
6090
6091 assert!(!buffer.is_dirty());
6092 assert!(events.borrow().is_empty());
6093
6094 buffer.edit(vec![1..2], "", cx);
6095 });
6096
        // After the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6100 assert!(buffer.is_dirty());
6101 assert_eq!(
6102 *events.borrow(),
6103 &[language::Event::Edited, language::Event::Dirtied]
6104 );
6105 events.borrow_mut().clear();
6106 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6107 });
6108
        // After saving, the buffer is not dirty, and emits a saved event.
6110 buffer1.update(cx, |buffer, cx| {
6111 assert!(!buffer.is_dirty());
6112 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6113 events.borrow_mut().clear();
6114
6115 buffer.edit(vec![1..1], "B", cx);
6116 buffer.edit(vec![2..2], "D", cx);
6117 });
6118
        // After editing again, the buffer is dirty, and emits another dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6122 assert!(buffer.is_dirty());
6123 assert_eq!(
6124 *events.borrow(),
6125 &[
6126 language::Event::Edited,
6127 language::Event::Dirtied,
6128 language::Event::Edited,
6129 ],
6130 );
6131 events.borrow_mut().clear();
6132
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
            assert!(buffer.is_dirty());
6138 });
6139
6140 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6141
6142 // When a file is deleted, the buffer is considered dirty.
6143 let events = Rc::new(RefCell::new(Vec::new()));
6144 let buffer2 = project
6145 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6146 .await
6147 .unwrap();
6148 buffer2.update(cx, |_, cx| {
6149 cx.subscribe(&buffer2, {
6150 let events = events.clone();
6151 move |_, _, event, _| events.borrow_mut().push(event.clone())
6152 })
6153 .detach();
6154 });
6155
6156 fs::remove_file(dir.path().join("file2")).unwrap();
6157 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6158 assert_eq!(
6159 *events.borrow(),
6160 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6161 );
6162
        // When a buffer is already dirty and its file is deleted, we don't emit a Dirtied event.
6164 let events = Rc::new(RefCell::new(Vec::new()));
6165 let buffer3 = project
6166 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6167 .await
6168 .unwrap();
6169 buffer3.update(cx, |_, cx| {
6170 cx.subscribe(&buffer3, {
6171 let events = events.clone();
6172 move |_, _, event, _| events.borrow_mut().push(event.clone())
6173 })
6174 .detach();
6175 });
6176
6177 worktree.flush_fs_events(&cx).await;
6178 buffer3.update(cx, |buffer, cx| {
6179 buffer.edit(Some(0..0), "x", cx);
6180 });
6181 events.borrow_mut().clear();
6182 fs::remove_file(dir.path().join("file3")).unwrap();
6183 buffer3
6184 .condition(&cx, |_, _| !events.borrow().is_empty())
6185 .await;
6186 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6187 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6188 }
6189
6190 #[gpui::test]
6191 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6192 use std::fs;
6193
6194 let initial_contents = "aaa\nbbbbb\nc\n";
6195 let dir = temp_tree(json!({ "the-file": initial_contents }));
6196
6197 let project = Project::test(Arc::new(RealFs), cx);
6198 let (worktree, _) = project
6199 .update(cx, |p, cx| {
6200 p.find_or_create_local_worktree(dir.path(), true, cx)
6201 })
6202 .await
6203 .unwrap();
6204 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6205
6206 worktree
6207 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6208 .await;
6209
6210 let abs_path = dir.path().join("the-file");
6211 let buffer = project
6212 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6213 .await
6214 .unwrap();
6215
6216 // TODO
6217 // Add a cursor on each row.
6218 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6219 // assert!(!buffer.is_dirty());
6220 // buffer.add_selection_set(
6221 // &(0..3)
6222 // .map(|row| Selection {
6223 // id: row as usize,
6224 // start: Point::new(row, 1),
6225 // end: Point::new(row, 1),
6226 // reversed: false,
6227 // goal: SelectionGoal::None,
6228 // })
6229 // .collect::<Vec<_>>(),
6230 // cx,
6231 // )
6232 // });
6233
6234 // Change the file on disk, adding two new lines of text, and removing
6235 // one line.
6236 buffer.read_with(cx, |buffer, _| {
6237 assert!(!buffer.is_dirty());
6238 assert!(!buffer.has_conflict());
6239 });
6240 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6241 fs::write(&abs_path, new_contents).unwrap();
6242
6243 // Because the buffer was not modified, it is reloaded from disk. Its
6244 // contents are edited according to the diff between the old and new
6245 // file contents.
6246 buffer
6247 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6248 .await;
6249
6250 buffer.update(cx, |buffer, _| {
6251 assert_eq!(buffer.text(), new_contents);
6252 assert!(!buffer.is_dirty());
6253 assert!(!buffer.has_conflict());
6254
6255 // TODO
6256 // let cursor_positions = buffer
6257 // .selection_set(selection_set_id)
6258 // .unwrap()
6259 // .selections::<Point>(&*buffer)
6260 // .map(|selection| {
6261 // assert_eq!(selection.start, selection.end);
6262 // selection.start
6263 // })
6264 // .collect::<Vec<_>>();
6265 // assert_eq!(
6266 // cursor_positions,
6267 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6268 // );
6269 });
6270
        // Modify the buffer.
6272 buffer.update(cx, |buffer, cx| {
6273 buffer.edit(vec![0..0], " ", cx);
6274 assert!(buffer.is_dirty());
6275 assert!(!buffer.has_conflict());
6276 });
6277
6278 // Change the file on disk again, adding blank lines to the beginning.
6279 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6280
6281 // Because the buffer is modified, it doesn't reload from disk, but is
6282 // marked as having a conflict.
6283 buffer
6284 .condition(&cx, |buffer, _| buffer.has_conflict())
6285 .await;
6286 }
6287
6288 #[gpui::test]
6289 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6290 cx.foreground().forbid_parking();
6291
6292 let fs = FakeFs::new(cx.background());
6293 fs.insert_tree(
6294 "/the-dir",
6295 json!({
6296 "a.rs": "
6297 fn foo(mut v: Vec<usize>) {
6298 for x in &v {
6299 v.push(1);
6300 }
6301 }
6302 "
6303 .unindent(),
6304 }),
6305 )
6306 .await;
6307
6308 let project = Project::test(fs.clone(), cx);
6309 let (worktree, _) = project
6310 .update(cx, |p, cx| {
6311 p.find_or_create_local_worktree("/the-dir", true, cx)
6312 })
6313 .await
6314 .unwrap();
6315 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6316
6317 let buffer = project
6318 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6319 .await
6320 .unwrap();
6321
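        // Build a diagnostics message in which related information links each
        // hint to its primary diagnostic and vice versa.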
6322 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6323 let message = lsp::PublishDiagnosticsParams {
6324 uri: buffer_uri.clone(),
6325 diagnostics: vec![
6326 lsp::Diagnostic {
6327 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6328 severity: Some(DiagnosticSeverity::WARNING),
6329 message: "error 1".to_string(),
6330 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6331 location: lsp::Location {
6332 uri: buffer_uri.clone(),
6333 range: lsp::Range::new(
6334 lsp::Position::new(1, 8),
6335 lsp::Position::new(1, 9),
6336 ),
6337 },
6338 message: "error 1 hint 1".to_string(),
6339 }]),
6340 ..Default::default()
6341 },
6342 lsp::Diagnostic {
6343 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6344 severity: Some(DiagnosticSeverity::HINT),
6345 message: "error 1 hint 1".to_string(),
6346 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6347 location: lsp::Location {
6348 uri: buffer_uri.clone(),
6349 range: lsp::Range::new(
6350 lsp::Position::new(1, 8),
6351 lsp::Position::new(1, 9),
6352 ),
6353 },
6354 message: "original diagnostic".to_string(),
6355 }]),
6356 ..Default::default()
6357 },
6358 lsp::Diagnostic {
6359 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6360 severity: Some(DiagnosticSeverity::ERROR),
6361 message: "error 2".to_string(),
6362 related_information: Some(vec![
6363 lsp::DiagnosticRelatedInformation {
6364 location: lsp::Location {
6365 uri: buffer_uri.clone(),
6366 range: lsp::Range::new(
6367 lsp::Position::new(1, 13),
6368 lsp::Position::new(1, 15),
6369 ),
6370 },
6371 message: "error 2 hint 1".to_string(),
6372 },
6373 lsp::DiagnosticRelatedInformation {
6374 location: lsp::Location {
6375 uri: buffer_uri.clone(),
6376 range: lsp::Range::new(
6377 lsp::Position::new(1, 13),
6378 lsp::Position::new(1, 15),
6379 ),
6380 },
6381 message: "error 2 hint 2".to_string(),
6382 },
6383 ]),
6384 ..Default::default()
6385 },
6386 lsp::Diagnostic {
6387 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6388 severity: Some(DiagnosticSeverity::HINT),
6389 message: "error 2 hint 1".to_string(),
6390 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6391 location: lsp::Location {
6392 uri: buffer_uri.clone(),
6393 range: lsp::Range::new(
6394 lsp::Position::new(2, 8),
6395 lsp::Position::new(2, 17),
6396 ),
6397 },
6398 message: "original diagnostic".to_string(),
6399 }]),
6400 ..Default::default()
6401 },
6402 lsp::Diagnostic {
6403 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6404 severity: Some(DiagnosticSeverity::HINT),
6405 message: "error 2 hint 2".to_string(),
6406 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6407 location: lsp::Location {
6408 uri: buffer_uri.clone(),
6409 range: lsp::Range::new(
6410 lsp::Position::new(2, 8),
6411 lsp::Position::new(2, 17),
6412 ),
6413 },
6414 message: "original diagnostic".to_string(),
6415 }]),
6416 ..Default::default()
6417 },
6418 ],
6419 version: None,
6420 };
6421
6422 project
6423 .update(cx, |p, cx| {
6424 p.update_diagnostics(message, &Default::default(), cx)
6425 })
6426 .unwrap();
6427 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6428
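        // The diagnostics are grouped: error 1 and its hint form group 0, while
        // error 2 and its hints form group 1.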
6429 assert_eq!(
6430 buffer
6431 .diagnostics_in_range::<_, Point>(0..buffer.len())
6432 .collect::<Vec<_>>(),
6433 &[
6434 DiagnosticEntry {
6435 range: Point::new(1, 8)..Point::new(1, 9),
6436 diagnostic: Diagnostic {
6437 severity: DiagnosticSeverity::WARNING,
6438 message: "error 1".to_string(),
6439 group_id: 0,
6440 is_primary: true,
6441 ..Default::default()
6442 }
6443 },
6444 DiagnosticEntry {
6445 range: Point::new(1, 8)..Point::new(1, 9),
6446 diagnostic: Diagnostic {
6447 severity: DiagnosticSeverity::HINT,
6448 message: "error 1 hint 1".to_string(),
6449 group_id: 0,
6450 is_primary: false,
6451 ..Default::default()
6452 }
6453 },
6454 DiagnosticEntry {
6455 range: Point::new(1, 13)..Point::new(1, 15),
6456 diagnostic: Diagnostic {
6457 severity: DiagnosticSeverity::HINT,
6458 message: "error 2 hint 1".to_string(),
6459 group_id: 1,
6460 is_primary: false,
6461 ..Default::default()
6462 }
6463 },
6464 DiagnosticEntry {
6465 range: Point::new(1, 13)..Point::new(1, 15),
6466 diagnostic: Diagnostic {
6467 severity: DiagnosticSeverity::HINT,
6468 message: "error 2 hint 2".to_string(),
6469 group_id: 1,
6470 is_primary: false,
6471 ..Default::default()
6472 }
6473 },
6474 DiagnosticEntry {
6475 range: Point::new(2, 8)..Point::new(2, 17),
6476 diagnostic: Diagnostic {
6477 severity: DiagnosticSeverity::ERROR,
6478 message: "error 2".to_string(),
6479 group_id: 1,
6480 is_primary: true,
6481 ..Default::default()
6482 }
6483 }
6484 ]
6485 );
6486
6487 assert_eq!(
6488 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6489 &[
6490 DiagnosticEntry {
6491 range: Point::new(1, 8)..Point::new(1, 9),
6492 diagnostic: Diagnostic {
6493 severity: DiagnosticSeverity::WARNING,
6494 message: "error 1".to_string(),
6495 group_id: 0,
6496 is_primary: true,
6497 ..Default::default()
6498 }
6499 },
6500 DiagnosticEntry {
6501 range: Point::new(1, 8)..Point::new(1, 9),
6502 diagnostic: Diagnostic {
6503 severity: DiagnosticSeverity::HINT,
6504 message: "error 1 hint 1".to_string(),
6505 group_id: 0,
6506 is_primary: false,
6507 ..Default::default()
6508 }
6509 },
6510 ]
6511 );
6512 assert_eq!(
6513 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6514 &[
6515 DiagnosticEntry {
6516 range: Point::new(1, 13)..Point::new(1, 15),
6517 diagnostic: Diagnostic {
6518 severity: DiagnosticSeverity::HINT,
6519 message: "error 2 hint 1".to_string(),
6520 group_id: 1,
6521 is_primary: false,
6522 ..Default::default()
6523 }
6524 },
6525 DiagnosticEntry {
6526 range: Point::new(1, 13)..Point::new(1, 15),
6527 diagnostic: Diagnostic {
6528 severity: DiagnosticSeverity::HINT,
6529 message: "error 2 hint 2".to_string(),
6530 group_id: 1,
6531 is_primary: false,
6532 ..Default::default()
6533 }
6534 },
6535 DiagnosticEntry {
6536 range: Point::new(2, 8)..Point::new(2, 17),
6537 diagnostic: Diagnostic {
6538 severity: DiagnosticSeverity::ERROR,
6539 message: "error 2".to_string(),
6540 group_id: 1,
6541 is_primary: true,
6542 ..Default::default()
6543 }
6544 }
6545 ]
6546 );
6547 }
6548
6549 #[gpui::test]
6550 async fn test_rename(cx: &mut gpui::TestAppContext) {
6551 cx.foreground().forbid_parking();
6552
6553 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6554 let language = Arc::new(Language::new(
6555 LanguageConfig {
6556 name: "Rust".into(),
6557 path_suffixes: vec!["rs".to_string()],
6558 language_server: Some(language_server_config),
6559 ..Default::default()
6560 },
6561 Some(tree_sitter_rust::language()),
6562 ));
6563
6564 let fs = FakeFs::new(cx.background());
6565 fs.insert_tree(
6566 "/dir",
6567 json!({
6568 "one.rs": "const ONE: usize = 1;",
6569 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6570 }),
6571 )
6572 .await;
6573
6574 let project = Project::test(fs.clone(), cx);
6575 project.update(cx, |project, _| {
6576 Arc::get_mut(&mut project.languages).unwrap().add(language);
6577 });
6578
6579 let (tree, _) = project
6580 .update(cx, |project, cx| {
6581 project.find_or_create_local_worktree("/dir", true, cx)
6582 })
6583 .await
6584 .unwrap();
6585 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6586 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6587 .await;
6588
6589 let buffer = project
6590 .update(cx, |project, cx| {
6591 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6592 })
6593 .await
6594 .unwrap();
6595
6596 let mut fake_server = fake_servers.next().await.unwrap();
6597
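        // Prepare the rename; the fake server responds with the range of `ONE` in one.rs.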
6598 let response = project.update(cx, |project, cx| {
6599 project.prepare_rename(buffer.clone(), 7, cx)
6600 });
6601 fake_server
6602 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6603 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6604 assert_eq!(params.position, lsp::Position::new(0, 7));
6605 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6606 lsp::Position::new(0, 6),
6607 lsp::Position::new(0, 9),
6608 )))
6609 })
6610 .next()
6611 .await
6612 .unwrap();
6613 let range = response.await.unwrap().unwrap();
6614 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6615 assert_eq!(range, 6..9);
6616
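        // Perform the rename; the fake server responds with a workspace edit
        // that touches both one.rs and two.rs.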
6617 let response = project.update(cx, |project, cx| {
6618 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6619 });
6620 fake_server
6621 .handle_request::<lsp::request::Rename, _>(|params, _| {
6622 assert_eq!(
6623 params.text_document_position.text_document.uri.as_str(),
6624 "file:///dir/one.rs"
6625 );
6626 assert_eq!(
6627 params.text_document_position.position,
6628 lsp::Position::new(0, 7)
6629 );
6630 assert_eq!(params.new_name, "THREE");
6631 Some(lsp::WorkspaceEdit {
6632 changes: Some(
6633 [
6634 (
6635 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6636 vec![lsp::TextEdit::new(
6637 lsp::Range::new(
6638 lsp::Position::new(0, 6),
6639 lsp::Position::new(0, 9),
6640 ),
6641 "THREE".to_string(),
6642 )],
6643 ),
6644 (
6645 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6646 vec![
6647 lsp::TextEdit::new(
6648 lsp::Range::new(
6649 lsp::Position::new(0, 24),
6650 lsp::Position::new(0, 27),
6651 ),
6652 "THREE".to_string(),
6653 ),
6654 lsp::TextEdit::new(
6655 lsp::Range::new(
6656 lsp::Position::new(0, 35),
6657 lsp::Position::new(0, 38),
6658 ),
6659 "THREE".to_string(),
6660 ),
6661 ],
6662 ),
6663 ]
6664 .into_iter()
6665 .collect(),
6666 ),
6667 ..Default::default()
6668 })
6669 })
6670 .next()
6671 .await
6672 .unwrap();
6673 let mut transaction = response.await.unwrap().0;
6674 assert_eq!(transaction.len(), 2);
6675 assert_eq!(
6676 transaction
6677 .remove_entry(&buffer)
6678 .unwrap()
6679 .0
6680 .read_with(cx, |buffer, _| buffer.text()),
6681 "const THREE: usize = 1;"
6682 );
6683 assert_eq!(
6684 transaction
6685 .into_keys()
6686 .next()
6687 .unwrap()
6688 .read_with(cx, |buffer, _| buffer.text()),
6689 "const TWO: usize = one::THREE + one::THREE;"
6690 );
6691 }
6692
6693 #[gpui::test]
6694 async fn test_search(cx: &mut gpui::TestAppContext) {
6695 let fs = FakeFs::new(cx.background());
6696 fs.insert_tree(
6697 "/dir",
6698 json!({
6699 "one.rs": "const ONE: usize = 1;",
6700 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6701 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6702 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6703 }),
6704 )
6705 .await;
6706 let project = Project::test(fs.clone(), cx);
6707 let (tree, _) = project
6708 .update(cx, |project, cx| {
6709 project.find_or_create_local_worktree("/dir", true, cx)
6710 })
6711 .await
6712 .unwrap();
6713 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6714 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6715 .await;
6716
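        // Searching the project finds the matches in the files on disk.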
6717 assert_eq!(
6718 search(&project, SearchQuery::text("TWO", false, true), cx)
6719 .await
6720 .unwrap(),
6721 HashMap::from_iter([
6722 ("two.rs".to_string(), vec![6..9]),
6723 ("three.rs".to_string(), vec![37..40])
6724 ])
6725 );
6726
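        // Edit an open buffer so that it contains additional matches; the search
        // reflects the unsaved, in-memory contents of the buffer.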
6727 let buffer_4 = project
6728 .update(cx, |project, cx| {
6729 project.open_buffer((worktree_id, "four.rs"), cx)
6730 })
6731 .await
6732 .unwrap();
6733 buffer_4.update(cx, |buffer, cx| {
6734 buffer.edit([20..28, 31..43], "two::TWO", cx);
6735 });
6736
6737 assert_eq!(
6738 search(&project, SearchQuery::text("TWO", false, true), cx)
6739 .await
6740 .unwrap(),
6741 HashMap::from_iter([
6742 ("two.rs".to_string(), vec![6..9]),
6743 ("three.rs".to_string(), vec![37..40]),
6744 ("four.rs".to_string(), vec![25..28, 36..39])
6745 ])
6746 );
6747
6748 async fn search(
6749 project: &ModelHandle<Project>,
6750 query: SearchQuery,
6751 cx: &mut gpui::TestAppContext,
6752 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6753 let results = project
6754 .update(cx, |project, cx| project.search(query, cx))
6755 .await?;
6756
6757 Ok(results
6758 .into_iter()
6759 .map(|(buffer, ranges)| {
6760 buffer.read_with(cx, |buffer, _| {
6761 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6762 let ranges = ranges
6763 .into_iter()
6764 .map(|range| range.to_offset(buffer))
6765 .collect::<Vec<_>>();
6766 (path, ranges)
6767 })
6768 })
6769 .collect())
6770 }
6771 }
6772}