1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
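/// An entity that may be associated with an entry in one of the project's worktrees.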
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
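/// The state for a collection of worktrees being edited together: open buffers,
/// running language servers, diagnostics, collaborators, and the client
/// connection used to share the project with (or join it from) other peers.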
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
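/// How the project holds on to an open buffer. Remote and shared projects keep
/// strong handles so buffers stay alive for collaborators; unshared local
/// projects keep weak handles so buffers can be released when no longer in use.
/// `Loading` buffers accumulate operations that arrive before the buffer itself
/// is available.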
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
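/// A strong or weak handle to a worktree. Handles are upgraded to strong when
/// the project is shared and downgraded again (for worktrees that aren't
/// visible) when sharing stops.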
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
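/// Whether this project lives on the local machine (and may be shared with
/// guests) or was joined from a remote host.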
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
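/// Another peer that has joined this project, along with the replica id that
/// identifies their edits.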
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
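/// Events emitted by a [`Project`] to notify its observers.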
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127}
128
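/// Messages forwarded from a language server's notification handlers to the
/// main thread, where they're processed by [`Project::on_lsp_event`].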
129enum LanguageServerEvent {
130 WorkStart {
131 token: String,
132 },
133 WorkProgress {
134 token: String,
135 progress: LanguageServerProgress,
136 },
137 WorkEnd {
138 token: String,
139 },
140 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
141}
142
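/// The state of a running language server: its pending work-done progress and
/// the number of in-flight disk-based diagnostic updates.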
143pub struct LanguageServerStatus {
144 pub name: String,
145 pub pending_work: BTreeMap<String, LanguageServerProgress>,
146 pending_diagnostic_updates: isize,
147}
148
149#[derive(Clone, Debug)]
150pub struct LanguageServerProgress {
151 pub message: Option<String>,
152 pub percentage: Option<usize>,
153 pub last_update_at: Instant,
154}
155
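/// A path to a file or directory, relative to the root of a specific worktree.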
156#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
157pub struct ProjectPath {
158 pub worktree_id: WorktreeId,
159 pub path: Arc<Path>,
160}
161
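/// Counts of a path's primary diagnostics, broken down by severity.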
162#[derive(Clone, Debug, Default, PartialEq)]
163pub struct DiagnosticSummary {
164 pub error_count: usize,
165 pub warning_count: usize,
166 pub info_count: usize,
167 pub hint_count: usize,
168}
169
170#[derive(Debug)]
171pub struct Location {
172 pub buffer: ModelHandle<Buffer>,
173 pub range: Range<language::Anchor>,
174}
175
176#[derive(Debug)]
177pub struct DocumentHighlight {
178 pub range: Range<language::Anchor>,
179 pub kind: DocumentHighlightKind,
180}
181
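/// A symbol reported by a language server, along with the worktree and path
/// where it's located.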
182#[derive(Clone, Debug)]
183pub struct Symbol {
184 pub source_worktree_id: WorktreeId,
185 pub worktree_id: WorktreeId,
186 pub language_name: String,
187 pub path: PathBuf,
188 pub label: CodeLabel,
189 pub name: String,
190 pub kind: lsp::SymbolKind,
191 pub range: Range<PointUtf16>,
192 pub signature: [u8; 32],
193}
194
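/// The buffer transactions produced by a single project-wide operation (such as
/// formatting or applying a code action), keyed by buffer.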
195#[derive(Default)]
196pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
197
198impl DiagnosticSummary {
199 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
200 let mut this = Self {
201 error_count: 0,
202 warning_count: 0,
203 info_count: 0,
204 hint_count: 0,
205 };
206
207 for entry in diagnostics {
208 if entry.diagnostic.is_primary {
209 match entry.diagnostic.severity {
210 DiagnosticSeverity::ERROR => this.error_count += 1,
211 DiagnosticSeverity::WARNING => this.warning_count += 1,
212 DiagnosticSeverity::INFORMATION => this.info_count += 1,
213 DiagnosticSeverity::HINT => this.hint_count += 1,
214 _ => {}
215 }
216 }
217 }
218
219 this
220 }
221
222 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
223 proto::DiagnosticSummary {
224 path: path.to_string_lossy().to_string(),
225 error_count: self.error_count as u32,
226 warning_count: self.warning_count as u32,
227 info_count: self.info_count as u32,
228 hint_count: self.hint_count as u32,
229 }
230 }
231}
232
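/// An identifier for an entry in a worktree, unique within a project.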
233#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
234pub struct ProjectEntryId(usize);
235
236impl ProjectEntryId {
237 pub fn new(counter: &AtomicUsize) -> Self {
238 Self(counter.fetch_add(1, SeqCst))
239 }
240
241 pub fn from_proto(id: u64) -> Self {
242 Self(id as usize)
243 }
244
245 pub fn to_proto(&self) -> u64 {
246 self.0 as u64
247 }
248
249 pub fn to_usize(&self) -> usize {
250 self.0
251 }
252}
253
254impl Project {
255 pub fn init(client: &Arc<Client>) {
256 client.add_entity_message_handler(Self::handle_add_collaborator);
257 client.add_entity_message_handler(Self::handle_buffer_reloaded);
258 client.add_entity_message_handler(Self::handle_buffer_saved);
259 client.add_entity_message_handler(Self::handle_start_language_server);
260 client.add_entity_message_handler(Self::handle_update_language_server);
261 client.add_entity_message_handler(Self::handle_remove_collaborator);
262 client.add_entity_message_handler(Self::handle_register_worktree);
263 client.add_entity_message_handler(Self::handle_unregister_worktree);
264 client.add_entity_message_handler(Self::handle_unshare_project);
265 client.add_entity_message_handler(Self::handle_update_buffer_file);
266 client.add_entity_message_handler(Self::handle_update_buffer);
267 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
268 client.add_entity_message_handler(Self::handle_update_worktree);
269 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
270 client.add_entity_request_handler(Self::handle_apply_code_action);
271 client.add_entity_request_handler(Self::handle_format_buffers);
272 client.add_entity_request_handler(Self::handle_get_code_actions);
273 client.add_entity_request_handler(Self::handle_get_completions);
274 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
275 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
276 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
277 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
278 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
279 client.add_entity_request_handler(Self::handle_search_project);
280 client.add_entity_request_handler(Self::handle_get_project_symbols);
281 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
282 client.add_entity_request_handler(Self::handle_open_buffer_by_path);
283 client.add_entity_request_handler(Self::handle_save_buffer);
284 }
285
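    /// Creates a project that operates on the local filesystem. While the
    /// client is connected, the project keeps itself registered with the
    /// server so that it can be shared with collaborators.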
286 pub fn local(
287 client: Arc<Client>,
288 user_store: ModelHandle<UserStore>,
289 languages: Arc<LanguageRegistry>,
290 fs: Arc<dyn Fs>,
291 cx: &mut MutableAppContext,
292 ) -> ModelHandle<Self> {
293 cx.add_model(|cx: &mut ModelContext<Self>| {
294 let (remote_id_tx, remote_id_rx) = watch::channel();
295 let _maintain_remote_id_task = cx.spawn_weak({
296 let rpc = client.clone();
297 move |this, mut cx| {
298 async move {
299 let mut status = rpc.status();
300 while let Some(status) = status.next().await {
301 if let Some(this) = this.upgrade(&cx) {
302 let remote_id = if status.is_connected() {
303 let response = rpc.request(proto::RegisterProject {}).await?;
304 Some(response.project_id)
305 } else {
306 None
307 };
308
309 if let Some(project_id) = remote_id {
310 let mut registrations = Vec::new();
311 this.update(&mut cx, |this, cx| {
312 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
313 registrations.push(worktree.update(
314 cx,
315 |worktree, cx| {
316 let worktree = worktree.as_local_mut().unwrap();
317 worktree.register(project_id, cx)
318 },
319 ));
320 }
321 });
322 for registration in registrations {
323 registration.await?;
324 }
325 }
326 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
327 }
328 }
329 Ok(())
330 }
331 .log_err()
332 }
333 });
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 Self {
337 worktrees: Default::default(),
338 collaborators: Default::default(),
339 opened_buffers: Default::default(),
340 shared_buffers: Default::default(),
341 loading_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 buffer_snapshots: Default::default(),
344 client_state: ProjectClientState::Local {
345 is_shared: false,
346 remote_id_tx,
347 remote_id_rx,
348 _maintain_remote_id_task,
349 },
350 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
351 subscriptions: Vec::new(),
352 active_entry: None,
353 languages,
354 client,
355 user_store,
356 fs,
357 next_entry_id: Default::default(),
358 language_servers_with_diagnostics_running: 0,
359 language_servers: Default::default(),
360 started_language_servers: Default::default(),
361 language_server_statuses: Default::default(),
362 language_server_settings: Default::default(),
363 next_language_server_id: 0,
364 nonce: StdRng::from_entropy().gen(),
365 }
366 })
367 }
368
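    /// Joins a project that another peer is sharing, identified by
    /// `remote_id`. The shared worktrees, language server statuses, and the
    /// current set of collaborators are fetched before the project is
    /// returned.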
369 pub async fn remote(
370 remote_id: u64,
371 client: Arc<Client>,
372 user_store: ModelHandle<UserStore>,
373 languages: Arc<LanguageRegistry>,
374 fs: Arc<dyn Fs>,
375 cx: &mut AsyncAppContext,
376 ) -> Result<ModelHandle<Self>> {
377 client.authenticate_and_connect(&cx).await?;
378
379 let response = client
380 .request(proto::JoinProject {
381 project_id: remote_id,
382 })
383 .await?;
384
385 let replica_id = response.replica_id as ReplicaId;
386
387 let mut worktrees = Vec::new();
388 for worktree in response.worktrees {
389 let (worktree, load_task) = cx
390 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
391 worktrees.push(worktree);
392 load_task.detach();
393 }
394
395 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
396 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
397 let mut this = Self {
398 worktrees: Vec::new(),
399 loading_buffers: Default::default(),
400 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
401 shared_buffers: Default::default(),
402 loading_local_worktrees: Default::default(),
403 active_entry: None,
404 collaborators: Default::default(),
405 languages,
406 user_store: user_store.clone(),
407 fs,
408 next_entry_id: Default::default(),
409 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
410 client: client.clone(),
411 client_state: ProjectClientState::Remote {
412 sharing_has_stopped: false,
413 remote_id,
414 replica_id,
415 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
416 async move {
417 let mut status = client.status();
418 let is_connected =
419 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any subsequent status change means we were momentarily disconnected.
421 if !is_connected || status.next().await.is_some() {
422 if let Some(this) = this.upgrade(&cx) {
423 this.update(&mut cx, |this, cx| this.project_unshared(cx))
424 }
425 }
426 Ok(())
427 }
428 .log_err()
429 }),
430 },
431 language_servers_with_diagnostics_running: 0,
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_settings: Default::default(),
435 language_server_statuses: response
436 .language_servers
437 .into_iter()
438 .map(|server| {
439 (
440 server.id as usize,
441 LanguageServerStatus {
442 name: server.name,
443 pending_work: Default::default(),
444 pending_diagnostic_updates: 0,
445 },
446 )
447 })
448 .collect(),
449 next_language_server_id: 0,
450 opened_buffers: Default::default(),
451 buffer_snapshots: Default::default(),
452 nonce: StdRng::from_entropy().gen(),
453 };
454 for worktree in worktrees {
455 this.add_worktree(&worktree, cx);
456 }
457 this
458 });
459
460 let user_ids = response
461 .collaborators
462 .iter()
463 .map(|peer| peer.user_id)
464 .collect();
465 user_store
466 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
467 .await?;
468 let mut collaborators = HashMap::default();
469 for message in response.collaborators {
470 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
471 collaborators.insert(collaborator.peer_id, collaborator);
472 }
473
474 this.update(cx, |this, _| {
475 this.collaborators = collaborators;
476 });
477
478 Ok(this)
479 }
480
481 #[cfg(any(test, feature = "test-support"))]
482 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
483 let languages = Arc::new(LanguageRegistry::test());
484 let http_client = client::test::FakeHttpClient::with_404_response();
485 let client = client::Client::new(http_client.clone());
486 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
487 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
488 }
489
490 #[cfg(any(test, feature = "test-support"))]
491 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
492 self.opened_buffers
493 .get(&remote_id)
494 .and_then(|buffer| buffer.upgrade(cx))
495 }
496
497 #[cfg(any(test, feature = "test-support"))]
498 pub fn languages(&self) -> &Arc<LanguageRegistry> {
499 &self.languages
500 }
501
502 #[cfg(any(test, feature = "test-support"))]
503 pub fn check_invariants(&self, cx: &AppContext) {
504 if self.is_local() {
505 let mut worktree_root_paths = HashMap::default();
506 for worktree in self.worktrees(cx) {
507 let worktree = worktree.read(cx);
508 let abs_path = worktree.as_local().unwrap().abs_path().clone();
509 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
510 assert_eq!(
511 prev_worktree_id,
512 None,
513 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
514 abs_path,
515 worktree.id(),
516 prev_worktree_id
517 )
518 }
519 } else {
520 let replica_id = self.replica_id();
521 for buffer in self.opened_buffers.values() {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 let buffer = buffer.read(cx);
524 assert_eq!(
525 buffer.deferred_ops_len(),
526 0,
527 "replica {}, buffer {} has deferred operations",
528 replica_id,
529 buffer.remote_id()
530 );
531 }
532 }
533 }
534 }
535
536 #[cfg(any(test, feature = "test-support"))]
537 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
538 let path = path.into();
539 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
540 self.opened_buffers.iter().any(|(_, buffer)| {
541 if let Some(buffer) = buffer.upgrade(cx) {
542 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
543 if file.worktree == worktree && file.path() == &path.path {
544 return true;
545 }
546 }
547 }
548 false
549 })
550 } else {
551 false
552 }
553 }
554
555 pub fn fs(&self) -> &Arc<dyn Fs> {
556 &self.fs
557 }
558
559 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
561 *remote_id_tx.borrow_mut() = remote_id;
562 }
563
564 self.subscriptions.clear();
565 if let Some(remote_id) = remote_id {
566 self.subscriptions
567 .push(self.client.add_model_for_remote_entity(remote_id, cx));
568 }
569 }
570
571 pub fn remote_id(&self) -> Option<u64> {
572 match &self.client_state {
573 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
574 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
575 }
576 }
577
578 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
579 let mut id = None;
580 let mut watch = None;
581 match &self.client_state {
582 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
583 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
584 }
585
586 async move {
587 if let Some(id) = id {
588 return id;
589 }
590 let mut watch = watch.unwrap();
591 loop {
592 let id = *watch.borrow();
593 if let Some(id) = id {
594 return id;
595 }
596 watch.next().await;
597 }
598 }
599 }
600
601 pub fn replica_id(&self) -> ReplicaId {
602 match &self.client_state {
603 ProjectClientState::Local { .. } => 0,
604 ProjectClientState::Remote { replica_id, .. } => *replica_id,
605 }
606 }
607
608 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
609 &self.collaborators
610 }
611
612 pub fn worktrees<'a>(
613 &'a self,
614 cx: &'a AppContext,
615 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
616 self.worktrees
617 .iter()
618 .filter_map(move |worktree| worktree.upgrade(cx))
619 }
620
621 pub fn visible_worktrees<'a>(
622 &'a self,
623 cx: &'a AppContext,
624 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
625 self.worktrees.iter().filter_map(|worktree| {
626 worktree.upgrade(cx).and_then(|worktree| {
627 if worktree.read(cx).is_visible() {
628 Some(worktree)
629 } else {
630 None
631 }
632 })
633 })
634 }
635
636 pub fn worktree_for_id(
637 &self,
638 id: WorktreeId,
639 cx: &AppContext,
640 ) -> Option<ModelHandle<Worktree>> {
641 self.worktrees(cx)
642 .find(|worktree| worktree.read(cx).id() == id)
643 }
644
645 pub fn worktree_for_entry(
646 &self,
647 entry_id: ProjectEntryId,
648 cx: &AppContext,
649 ) -> Option<ModelHandle<Worktree>> {
650 self.worktrees(cx)
651 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
652 }
653
654 pub fn worktree_id_for_entry(
655 &self,
656 entry_id: ProjectEntryId,
657 cx: &AppContext,
658 ) -> Option<WorktreeId> {
659 self.worktree_for_entry(entry_id, cx)
660 .map(|worktree| worktree.read(cx).id())
661 }
662
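    /// Shares this local project with collaborators. Buffers and worktrees are
    /// upgraded to strong handles so they stay alive for guests, and every
    /// worktree is then shared through the server.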
663 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
664 let rpc = self.client.clone();
665 cx.spawn(|this, mut cx| async move {
666 let project_id = this.update(&mut cx, |this, cx| {
667 if let ProjectClientState::Local {
668 is_shared,
669 remote_id_rx,
670 ..
671 } = &mut this.client_state
672 {
673 *is_shared = true;
674
675 for open_buffer in this.opened_buffers.values_mut() {
676 match open_buffer {
677 OpenBuffer::Strong(_) => {}
678 OpenBuffer::Weak(buffer) => {
679 if let Some(buffer) = buffer.upgrade(cx) {
680 *open_buffer = OpenBuffer::Strong(buffer);
681 }
682 }
683 OpenBuffer::Loading(_) => unreachable!(),
684 }
685 }
686
687 for worktree_handle in this.worktrees.iter_mut() {
688 match worktree_handle {
689 WorktreeHandle::Strong(_) => {}
690 WorktreeHandle::Weak(worktree) => {
691 if let Some(worktree) = worktree.upgrade(cx) {
692 *worktree_handle = WorktreeHandle::Strong(worktree);
693 }
694 }
695 }
696 }
697
698 remote_id_rx
699 .borrow()
700 .ok_or_else(|| anyhow!("no project id"))
701 } else {
702 Err(anyhow!("can't share a remote project"))
703 }
704 })?;
705
706 rpc.request(proto::ShareProject { project_id }).await?;
707
708 let mut tasks = Vec::new();
709 this.update(&mut cx, |this, cx| {
710 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
711 worktree.update(cx, |worktree, cx| {
712 let worktree = worktree.as_local_mut().unwrap();
713 tasks.push(worktree.share(project_id, cx));
714 });
715 }
716 });
717 for task in tasks {
718 task.await?;
719 }
720 this.update(&mut cx, |_, cx| cx.notify());
721 Ok(())
722 })
723 }
724
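    /// Stops sharing this local project. Collaborators and shared buffers are
    /// dropped, open buffers are downgraded to weak handles, and worktrees
    /// that aren't visible are downgraded as well.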
725 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
726 let rpc = self.client.clone();
727 cx.spawn(|this, mut cx| async move {
728 let project_id = this.update(&mut cx, |this, cx| {
729 if let ProjectClientState::Local {
730 is_shared,
731 remote_id_rx,
732 ..
733 } = &mut this.client_state
734 {
735 *is_shared = false;
736
737 for open_buffer in this.opened_buffers.values_mut() {
738 match open_buffer {
739 OpenBuffer::Strong(buffer) => {
740 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
741 }
742 _ => {}
743 }
744 }
745
746 for worktree_handle in this.worktrees.iter_mut() {
747 match worktree_handle {
748 WorktreeHandle::Strong(worktree) => {
749 if !worktree.read(cx).is_visible() {
750 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
751 }
752 }
753 _ => {}
754 }
755 }
756
757 remote_id_rx
758 .borrow()
759 .ok_or_else(|| anyhow!("no project id"))
760 } else {
761 Err(anyhow!("can't share a remote project"))
762 }
763 })?;
764
765 rpc.send(proto::UnshareProject { project_id })?;
766 this.update(&mut cx, |this, cx| {
767 this.collaborators.clear();
768 this.shared_buffers.clear();
769 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
770 worktree.update(cx, |worktree, _| {
771 worktree.as_local_mut().unwrap().unshare();
772 });
773 }
774 cx.notify()
775 });
776 Ok(())
777 })
778 }
779
780 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
781 if let ProjectClientState::Remote {
782 sharing_has_stopped,
783 ..
784 } = &mut self.client_state
785 {
786 *sharing_has_stopped = true;
787 self.collaborators.clear();
788 cx.notify();
789 }
790 }
791
792 pub fn is_read_only(&self) -> bool {
793 match &self.client_state {
794 ProjectClientState::Local { .. } => false,
795 ProjectClientState::Remote {
796 sharing_has_stopped,
797 ..
798 } => *sharing_has_stopped,
799 }
800 }
801
802 pub fn is_local(&self) -> bool {
803 match &self.client_state {
804 ProjectClientState::Local { .. } => true,
805 ProjectClientState::Remote { .. } => false,
806 }
807 }
808
809 pub fn is_remote(&self) -> bool {
810 !self.is_local()
811 }
812
813 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
814 if self.is_remote() {
815 return Err(anyhow!("creating buffers as a guest is not supported yet"));
816 }
817
818 let buffer = cx.add_model(|cx| {
819 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
820 });
821 self.register_buffer(&buffer, cx)?;
822 Ok(buffer)
823 }
824
825 pub fn open_path(
826 &mut self,
827 path: impl Into<ProjectPath>,
828 cx: &mut ModelContext<Self>,
829 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
830 let task = self.open_buffer(path, cx);
831 cx.spawn_weak(|_, cx| async move {
832 let buffer = task.await?;
833 let project_entry_id = buffer
834 .read_with(&cx, |buffer, cx| {
835 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
836 })
837 .ok_or_else(|| anyhow!("no project entry"))?;
838 Ok((project_entry_id, buffer.into()))
839 })
840 }
841
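    /// Opens the buffer for the given path, loading it from the worktree (or
    /// requesting it from the host, for remote projects) if necessary.
    ///
    /// If the buffer is already open it's returned directly, and if a load for
    /// the same path is already in flight, that load is awaited so every
    /// caller receives the same buffer. A rough usage sketch (the surrounding
    /// `project`, `worktree_id`, and `cx` are assumed to exist):
    ///
    /// ```ignore
    /// let project_path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(project_path, cx))
    ///     .await?;
    /// ```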
842 pub fn open_buffer(
843 &mut self,
844 path: impl Into<ProjectPath>,
845 cx: &mut ModelContext<Self>,
846 ) -> Task<Result<ModelHandle<Buffer>>> {
847 let project_path = path.into();
848 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
849 worktree
850 } else {
851 return Task::ready(Err(anyhow!("no such worktree")));
852 };
853
854 // If there is already a buffer for the given path, then return it.
855 let existing_buffer = self.get_open_buffer(&project_path, cx);
856 if let Some(existing_buffer) = existing_buffer {
857 return Task::ready(Ok(existing_buffer));
858 }
859
860 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
861 // If the given path is already being loaded, then wait for that existing
862 // task to complete and return the same buffer.
863 hash_map::Entry::Occupied(e) => e.get().clone(),
864
865 // Otherwise, record the fact that this path is now being loaded.
866 hash_map::Entry::Vacant(entry) => {
867 let (mut tx, rx) = postage::watch::channel();
868 entry.insert(rx.clone());
869
870 let load_buffer = if worktree.read(cx).is_local() {
871 self.open_local_buffer(&project_path.path, &worktree, cx)
872 } else {
873 self.open_remote_buffer(&project_path.path, &worktree, cx)
874 };
875
876 cx.spawn(move |this, mut cx| async move {
877 let load_result = load_buffer.await;
878 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
879 // Record the fact that the buffer is no longer loading.
880 this.loading_buffers.remove(&project_path);
881 let buffer = load_result.map_err(Arc::new)?;
882 Ok(buffer)
883 }));
884 })
885 .detach();
886 rx
887 }
888 };
889
890 cx.foreground().spawn(async move {
891 loop {
892 if let Some(result) = loading_watch.borrow().as_ref() {
893 match result {
894 Ok(buffer) => return Ok(buffer.clone()),
895 Err(error) => return Err(anyhow!("{}", error)),
896 }
897 }
898 loading_watch.next().await;
899 }
900 })
901 }
902
903 fn open_local_buffer(
904 &mut self,
905 path: &Arc<Path>,
906 worktree: &ModelHandle<Worktree>,
907 cx: &mut ModelContext<Self>,
908 ) -> Task<Result<ModelHandle<Buffer>>> {
909 let load_buffer = worktree.update(cx, |worktree, cx| {
910 let worktree = worktree.as_local_mut().unwrap();
911 worktree.load_buffer(path, cx)
912 });
913 cx.spawn(|this, mut cx| async move {
914 let buffer = load_buffer.await?;
915 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
916 Ok(buffer)
917 })
918 }
919
920 fn open_remote_buffer(
921 &mut self,
922 path: &Arc<Path>,
923 worktree: &ModelHandle<Worktree>,
924 cx: &mut ModelContext<Self>,
925 ) -> Task<Result<ModelHandle<Buffer>>> {
926 let rpc = self.client.clone();
927 let project_id = self.remote_id().unwrap();
928 let remote_worktree_id = worktree.read(cx).id();
929 let path = path.clone();
930 let path_string = path.to_string_lossy().to_string();
931 cx.spawn(|this, mut cx| async move {
932 let response = rpc
933 .request(proto::OpenBufferByPath {
934 project_id,
935 worktree_id: remote_worktree_id.to_proto(),
936 path: path_string,
937 })
938 .await?;
939 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
940 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
941 .await
942 })
943 }
944
945 fn open_local_buffer_via_lsp(
946 &mut self,
947 abs_path: lsp::Url,
948 lang_name: Arc<str>,
949 lang_server: Arc<LanguageServer>,
950 cx: &mut ModelContext<Self>,
951 ) -> Task<Result<ModelHandle<Buffer>>> {
952 cx.spawn(|this, mut cx| async move {
953 let abs_path = abs_path
954 .to_file_path()
955 .map_err(|_| anyhow!("can't convert URI to path"))?;
956 let (worktree, relative_path) = if let Some(result) =
957 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
958 {
959 result
960 } else {
961 let worktree = this
962 .update(&mut cx, |this, cx| {
963 this.create_local_worktree(&abs_path, false, cx)
964 })
965 .await?;
966 this.update(&mut cx, |this, cx| {
967 this.language_servers
968 .insert((worktree.read(cx).id(), lang_name), lang_server);
969 });
970 (worktree, PathBuf::new())
971 };
972
973 let project_path = ProjectPath {
974 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
975 path: relative_path.into(),
976 };
977 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
978 .await
979 })
980 }
981
982 pub fn save_buffer_as(
983 &mut self,
984 buffer: ModelHandle<Buffer>,
985 abs_path: PathBuf,
986 cx: &mut ModelContext<Project>,
987 ) -> Task<Result<()>> {
988 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
989 cx.spawn(|this, mut cx| async move {
990 let (worktree, path) = worktree_task.await?;
991 worktree
992 .update(&mut cx, |worktree, cx| {
993 worktree
994 .as_local_mut()
995 .unwrap()
996 .save_buffer_as(buffer.clone(), path, cx)
997 })
998 .await?;
999 this.update(&mut cx, |this, cx| {
1000 this.assign_language_to_buffer(&buffer, cx);
1001 this.register_buffer_with_language_server(&buffer, cx);
1002 });
1003 Ok(())
1004 })
1005 }
1006
1007 pub fn get_open_buffer(
1008 &mut self,
1009 path: &ProjectPath,
1010 cx: &mut ModelContext<Self>,
1011 ) -> Option<ModelHandle<Buffer>> {
1012 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1013 self.opened_buffers.values().find_map(|buffer| {
1014 let buffer = buffer.upgrade(cx)?;
1015 let file = File::from_dyn(buffer.read(cx).file())?;
1016 if file.worktree == worktree && file.path() == &path.path {
1017 Some(buffer)
1018 } else {
1019 None
1020 }
1021 })
1022 }
1023
1024 fn register_buffer(
1025 &mut self,
1026 buffer: &ModelHandle<Buffer>,
1027 cx: &mut ModelContext<Self>,
1028 ) -> Result<()> {
1029 let remote_id = buffer.read(cx).remote_id();
1030 let open_buffer = if self.is_remote() || self.is_shared() {
1031 OpenBuffer::Strong(buffer.clone())
1032 } else {
1033 OpenBuffer::Weak(buffer.downgrade())
1034 };
1035
1036 match self.opened_buffers.insert(remote_id, open_buffer) {
1037 None => {}
1038 Some(OpenBuffer::Loading(operations)) => {
1039 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1040 }
1041 Some(OpenBuffer::Weak(existing_handle)) => {
1042 if existing_handle.upgrade(cx).is_some() {
1043 Err(anyhow!(
1044 "already registered buffer with remote id {}",
1045 remote_id
1046 ))?
1047 }
1048 }
1049 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1050 "already registered buffer with remote id {}",
1051 remote_id
1052 ))?,
1053 }
1054 cx.subscribe(buffer, |this, buffer, event, cx| {
1055 this.on_buffer_event(buffer, event, cx);
1056 })
1057 .detach();
1058
1059 self.assign_language_to_buffer(buffer, cx);
1060 self.register_buffer_with_language_server(buffer, cx);
1061
1062 Ok(())
1063 }
1064
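    /// Tells the buffer's language server that a local buffer was opened,
    /// seeds the buffer with any diagnostics already known for its path, sets
    /// its completion triggers, and schedules a `didClose` notification for
    /// when the buffer is released.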
1065 fn register_buffer_with_language_server(
1066 &mut self,
1067 buffer_handle: &ModelHandle<Buffer>,
1068 cx: &mut ModelContext<Self>,
1069 ) {
1070 let buffer = buffer_handle.read(cx);
1071 let buffer_id = buffer.remote_id();
1072 if let Some(file) = File::from_dyn(buffer.file()) {
1073 if file.is_local() {
1074 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1075 let initial_snapshot = buffer.text_snapshot();
1076 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1077
1078 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1079 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1080 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1081 .log_err();
1082 }
1083 }
1084
1085 if let Some(server) = language_server {
1086 server
1087 .notify::<lsp::notification::DidOpenTextDocument>(
1088 lsp::DidOpenTextDocumentParams {
1089 text_document: lsp::TextDocumentItem::new(
1090 uri,
1091 Default::default(),
1092 0,
1093 initial_snapshot.text(),
1094 ),
                            },
1097 )
1098 .log_err();
1099 buffer_handle.update(cx, |buffer, cx| {
1100 buffer.set_completion_triggers(
1101 server
1102 .capabilities()
1103 .completion_provider
1104 .as_ref()
1105 .and_then(|provider| provider.trigger_characters.clone())
1106 .unwrap_or(Vec::new()),
1107 cx,
1108 )
1109 });
1110 self.buffer_snapshots
1111 .insert(buffer_id, vec![(0, initial_snapshot)]);
1112 }
1113
1114 cx.observe_release(buffer_handle, |this, buffer, cx| {
1115 if let Some(file) = File::from_dyn(buffer.file()) {
1116 if file.is_local() {
1117 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1118 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1119 server
1120 .notify::<lsp::notification::DidCloseTextDocument>(
1121 lsp::DidCloseTextDocumentParams {
1122 text_document: lsp::TextDocumentIdentifier::new(
1123 uri.clone(),
1124 ),
1125 },
1126 )
1127 .log_err();
1128 }
1129 }
1130 }
1131 })
1132 .detach();
1133 }
1134 }
1135 }
1136
1137 fn on_buffer_event(
1138 &mut self,
1139 buffer: ModelHandle<Buffer>,
1140 event: &BufferEvent,
1141 cx: &mut ModelContext<Self>,
1142 ) -> Option<()> {
1143 match event {
1144 BufferEvent::Operation(operation) => {
1145 let project_id = self.remote_id()?;
1146 let request = self.client.request(proto::UpdateBuffer {
1147 project_id,
1148 buffer_id: buffer.read(cx).remote_id(),
1149 operations: vec![language::proto::serialize_operation(&operation)],
1150 });
1151 cx.background().spawn(request).detach_and_log_err(cx);
1152 }
1153 BufferEvent::Edited => {
1154 let language_server = self
1155 .language_server_for_buffer(buffer.read(cx), cx)?
1156 .clone();
1157 let buffer = buffer.read(cx);
1158 let file = File::from_dyn(buffer.file())?;
1159 let abs_path = file.as_local()?.abs_path(cx);
1160 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1161 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1162 let (version, prev_snapshot) = buffer_snapshots.last()?;
1163 let next_snapshot = buffer.text_snapshot();
1164 let next_version = version + 1;
1165
1166 let content_changes = buffer
1167 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1168 .map(|edit| {
1169 let edit_start = edit.new.start.0;
1170 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1171 let new_text = next_snapshot
1172 .text_for_range(edit.new.start.1..edit.new.end.1)
1173 .collect();
1174 lsp::TextDocumentContentChangeEvent {
1175 range: Some(lsp::Range::new(
1176 edit_start.to_lsp_position(),
1177 edit_end.to_lsp_position(),
1178 )),
1179 range_length: None,
1180 text: new_text,
1181 }
1182 })
1183 .collect();
1184
1185 buffer_snapshots.push((next_version, next_snapshot));
1186
1187 language_server
1188 .notify::<lsp::notification::DidChangeTextDocument>(
1189 lsp::DidChangeTextDocumentParams {
1190 text_document: lsp::VersionedTextDocumentIdentifier::new(
1191 uri,
1192 next_version,
1193 ),
1194 content_changes,
1195 },
1196 )
1197 .log_err();
1198 }
1199 BufferEvent::Saved => {
1200 let file = File::from_dyn(buffer.read(cx).file())?;
1201 let worktree_id = file.worktree_id(cx);
1202 let abs_path = file.as_local()?.abs_path(cx);
1203 let text_document = lsp::TextDocumentIdentifier {
1204 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1205 };
1206
1207 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1208 server
1209 .notify::<lsp::notification::DidSaveTextDocument>(
1210 lsp::DidSaveTextDocumentParams {
1211 text_document: text_document.clone(),
1212 text: None,
1213 },
1214 )
1215 .log_err();
1216 }
1217 }
1218 _ => {}
1219 }
1220
1221 None
1222 }
1223
1224 fn language_servers_for_worktree(
1225 &self,
1226 worktree_id: WorktreeId,
1227 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1228 self.language_servers.iter().filter_map(
1229 move |((language_server_worktree_id, language_name), server)| {
1230 if *language_server_worktree_id == worktree_id {
1231 Some((language_name.as_ref(), server))
1232 } else {
1233 None
1234 }
1235 },
1236 )
1237 }
1238
1239 fn assign_language_to_buffer(
1240 &mut self,
1241 buffer: &ModelHandle<Buffer>,
1242 cx: &mut ModelContext<Self>,
1243 ) -> Option<()> {
1244 // If the buffer has a language, set it and start the language server if we haven't already.
1245 let full_path = buffer.read(cx).file()?.full_path(cx);
1246 let language = self.languages.select_language(&full_path)?;
1247 buffer.update(cx, |buffer, cx| {
1248 buffer.set_language(Some(language.clone()), cx);
1249 });
1250
1251 let file = File::from_dyn(buffer.read(cx).file())?;
1252 let worktree = file.worktree.read(cx).as_local()?;
1253 let worktree_id = worktree.id();
1254 let worktree_abs_path = worktree.abs_path().clone();
1255 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1256
1257 None
1258 }
1259
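    /// Starts a language server for the given worktree and language, unless
    /// one has already been started for that pair. The server's diagnostics,
    /// progress, and workspace-configuration handlers are wired up before it's
    /// initialized, and every matching buffer that's already open is then
    /// reported to it via `didOpen`.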
1260 fn start_language_server(
1261 &mut self,
1262 worktree_id: WorktreeId,
1263 worktree_path: Arc<Path>,
1264 language: Arc<Language>,
1265 cx: &mut ModelContext<Self>,
1266 ) {
1267 let key = (worktree_id, language.name());
1268 self.started_language_servers
1269 .entry(key.clone())
1270 .or_insert_with(|| {
1271 let server_id = post_inc(&mut self.next_language_server_id);
1272 let language_server = self.languages.start_language_server(
1273 language.clone(),
1274 worktree_path,
1275 self.client.http_client(),
1276 cx,
1277 );
1278 cx.spawn_weak(|this, mut cx| async move {
1279 let mut language_server = language_server?.await.log_err()?;
1280 let this = this.upgrade(&cx)?;
1281 let (language_server_events_tx, language_server_events_rx) =
1282 smol::channel::unbounded();
1283
1284 language_server
1285 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1286 let language_server_events_tx = language_server_events_tx.clone();
1287 move |params| {
1288 language_server_events_tx
1289 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1290 .ok();
1291 }
1292 })
1293 .detach();
1294
1295 language_server
1296 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1297 let settings = this
1298 .read_with(&cx, |this, _| this.language_server_settings.clone());
1299 move |params| {
1300 let settings = settings.lock();
1301 Ok(params
1302 .items
1303 .into_iter()
1304 .map(|item| {
1305 if let Some(section) = &item.section {
1306 settings
1307 .get(section)
1308 .cloned()
1309 .unwrap_or(serde_json::Value::Null)
1310 } else {
1311 settings.clone()
1312 }
1313 })
1314 .collect())
1315 }
1316 })
1317 .detach();
1318
1319 language_server
1320 .on_notification::<lsp::notification::Progress, _>(move |params| {
1321 let token = match params.token {
1322 lsp::NumberOrString::String(token) => token,
1323 lsp::NumberOrString::Number(token) => {
1324 log::info!("skipping numeric progress token {}", token);
1325 return;
1326 }
1327 };
1328
1329 match params.value {
1330 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1331 lsp::WorkDoneProgress::Begin(_) => {
1332 language_server_events_tx
1333 .try_send(LanguageServerEvent::WorkStart { token })
1334 .ok();
1335 }
1336 lsp::WorkDoneProgress::Report(report) => {
1337 language_server_events_tx
1338 .try_send(LanguageServerEvent::WorkProgress {
1339 token,
1340 progress: LanguageServerProgress {
1341 message: report.message,
1342 percentage: report
1343 .percentage
1344 .map(|p| p as usize),
1345 last_update_at: Instant::now(),
1346 },
1347 })
1348 .ok();
1349 }
1350 lsp::WorkDoneProgress::End(_) => {
1351 language_server_events_tx
1352 .try_send(LanguageServerEvent::WorkEnd { token })
1353 .ok();
1354 }
1355 },
1356 }
1357 })
1358 .detach();
1359
1360 // Process all the LSP events.
1361 cx.spawn(|mut cx| {
1362 let this = this.downgrade();
1363 async move {
1364 while let Ok(event) = language_server_events_rx.recv().await {
1365 let this = this.upgrade(&cx)?;
1366 this.update(&mut cx, |this, cx| {
1367 this.on_lsp_event(server_id, event, &language, cx)
1368 });
1369
1370 // Don't starve the main thread when lots of events arrive all at once.
1371 smol::future::yield_now().await;
1372 }
1373 Some(())
1374 }
1375 })
1376 .detach();
1377
1378 let language_server = language_server.initialize().await.log_err()?;
1379 this.update(&mut cx, |this, cx| {
1380 this.language_servers
1381 .insert(key.clone(), language_server.clone());
1382 this.language_server_statuses.insert(
1383 server_id,
1384 LanguageServerStatus {
1385 name: language_server.name().to_string(),
1386 pending_work: Default::default(),
1387 pending_diagnostic_updates: 0,
1388 },
1389 );
1390 language_server
1391 .notify::<lsp::notification::DidChangeConfiguration>(
1392 lsp::DidChangeConfigurationParams {
1393 settings: this.language_server_settings.lock().clone(),
1394 },
1395 )
1396 .ok();
1397
1398 if let Some(project_id) = this.remote_id() {
1399 this.client
1400 .send(proto::StartLanguageServer {
1401 project_id,
1402 server: Some(proto::LanguageServer {
1403 id: server_id as u64,
1404 name: language_server.name().to_string(),
1405 }),
1406 })
1407 .log_err();
1408 }
1409
1410 // Tell the language server about every open buffer in the worktree that matches the language.
1411 for buffer in this.opened_buffers.values() {
1412 if let Some(buffer_handle) = buffer.upgrade(cx) {
1413 let buffer = buffer_handle.read(cx);
1414 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1415 file
1416 } else {
1417 continue;
1418 };
1419 let language = if let Some(language) = buffer.language() {
1420 language
1421 } else {
1422 continue;
1423 };
1424 if (file.worktree.read(cx).id(), language.name()) != key {
1425 continue;
1426 }
1427
1428 let file = file.as_local()?;
1429 let versions = this
1430 .buffer_snapshots
1431 .entry(buffer.remote_id())
1432 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1433 let (version, initial_snapshot) = versions.last().unwrap();
1434 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1435 language_server
1436 .notify::<lsp::notification::DidOpenTextDocument>(
1437 lsp::DidOpenTextDocumentParams {
1438 text_document: lsp::TextDocumentItem::new(
1439 uri,
1440 Default::default(),
1441 *version,
1442 initial_snapshot.text(),
1443 ),
1444 },
1445 )
1446 .log_err()?;
1447 buffer_handle.update(cx, |buffer, cx| {
1448 buffer.set_completion_triggers(
1449 language_server
1450 .capabilities()
1451 .completion_provider
1452 .as_ref()
1453 .and_then(|provider| {
1454 provider.trigger_characters.clone()
1455 })
1456 .unwrap_or(Vec::new()),
1457 cx,
1458 )
1459 });
1460 }
1461 }
1462
1463 cx.notify();
1464 Some(())
1465 });
1466
1467 Some(language_server)
1468 })
1469 });
1470 }
1471
1472 fn on_lsp_event(
1473 &mut self,
1474 language_server_id: usize,
1475 event: LanguageServerEvent,
1476 language: &Arc<Language>,
1477 cx: &mut ModelContext<Self>,
1478 ) {
1479 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1480 let language_server_status =
1481 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1482 status
1483 } else {
1484 return;
1485 };
1486
1487 match event {
1488 LanguageServerEvent::WorkStart { token } => {
1489 if Some(&token) == disk_diagnostics_token {
1490 language_server_status.pending_diagnostic_updates += 1;
1491 if language_server_status.pending_diagnostic_updates == 1 {
1492 self.disk_based_diagnostics_started(cx);
1493 self.broadcast_language_server_update(
1494 language_server_id,
1495 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1496 proto::LspDiskBasedDiagnosticsUpdating {},
1497 ),
1498 );
1499 }
1500 } else {
1501 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1502 self.broadcast_language_server_update(
1503 language_server_id,
1504 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1505 token,
1506 }),
1507 );
1508 }
1509 }
1510 LanguageServerEvent::WorkProgress { token, progress } => {
1511 if Some(&token) != disk_diagnostics_token {
1512 self.on_lsp_work_progress(
1513 language_server_id,
1514 token.clone(),
1515 progress.clone(),
1516 cx,
1517 );
1518 self.broadcast_language_server_update(
1519 language_server_id,
1520 proto::update_language_server::Variant::WorkProgress(
1521 proto::LspWorkProgress {
1522 token,
1523 message: progress.message,
1524 percentage: progress.percentage.map(|p| p as u32),
1525 },
1526 ),
1527 );
1528 }
1529 }
1530 LanguageServerEvent::WorkEnd { token } => {
1531 if Some(&token) == disk_diagnostics_token {
1532 language_server_status.pending_diagnostic_updates -= 1;
1533 if language_server_status.pending_diagnostic_updates == 0 {
1534 self.disk_based_diagnostics_finished(cx);
1535 self.broadcast_language_server_update(
1536 language_server_id,
1537 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1538 proto::LspDiskBasedDiagnosticsUpdated {},
1539 ),
1540 );
1541 }
1542 } else {
1543 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1544 self.broadcast_language_server_update(
1545 language_server_id,
1546 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1547 token,
1548 }),
1549 );
1550 }
1551 }
1552 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1553 language.process_diagnostics(&mut params);
1554
1555 if disk_diagnostics_token.is_none() {
1556 self.disk_based_diagnostics_started(cx);
1557 self.broadcast_language_server_update(
1558 language_server_id,
1559 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1560 proto::LspDiskBasedDiagnosticsUpdating {},
1561 ),
1562 );
1563 }
1564 self.update_diagnostics(
1565 params,
1566 language
1567 .disk_based_diagnostic_sources()
1568 .unwrap_or(&Default::default()),
1569 cx,
1570 )
1571 .log_err();
1572 if disk_diagnostics_token.is_none() {
1573 self.disk_based_diagnostics_finished(cx);
1574 self.broadcast_language_server_update(
1575 language_server_id,
1576 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1577 proto::LspDiskBasedDiagnosticsUpdated {},
1578 ),
1579 );
1580 }
1581 }
1582 }
1583 }
1584
1585 fn on_lsp_work_start(
1586 &mut self,
1587 language_server_id: usize,
1588 token: String,
1589 cx: &mut ModelContext<Self>,
1590 ) {
1591 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1592 status.pending_work.insert(
1593 token,
1594 LanguageServerProgress {
1595 message: None,
1596 percentage: None,
1597 last_update_at: Instant::now(),
1598 },
1599 );
1600 cx.notify();
1601 }
1602 }
1603
1604 fn on_lsp_work_progress(
1605 &mut self,
1606 language_server_id: usize,
1607 token: String,
1608 progress: LanguageServerProgress,
1609 cx: &mut ModelContext<Self>,
1610 ) {
1611 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1612 status.pending_work.insert(token, progress);
1613 cx.notify();
1614 }
1615 }
1616
1617 fn on_lsp_work_end(
1618 &mut self,
1619 language_server_id: usize,
1620 token: String,
1621 cx: &mut ModelContext<Self>,
1622 ) {
1623 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1624 status.pending_work.remove(&token);
1625 cx.notify();
1626 }
1627 }
1628
1629 fn broadcast_language_server_update(
1630 &self,
1631 language_server_id: usize,
1632 event: proto::update_language_server::Variant,
1633 ) {
1634 if let Some(project_id) = self.remote_id() {
1635 self.client
1636 .send(proto::UpdateLanguageServer {
1637 project_id,
1638 language_server_id: language_server_id as u64,
1639 variant: Some(event),
1640 })
1641 .log_err();
1642 }
1643 }
1644
1645 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1646 for server in self.language_servers.values() {
1647 server
1648 .notify::<lsp::notification::DidChangeConfiguration>(
1649 lsp::DidChangeConfigurationParams {
1650 settings: settings.clone(),
1651 },
1652 )
1653 .ok();
1654 }
1655 *self.language_server_settings.lock() = settings;
1656 }
1657
1658 pub fn language_server_statuses(
1659 &self,
1660 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1661 self.language_server_statuses.values()
1662 }
1663
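    /// Ingests a `textDocument/publishDiagnostics` notification, grouping
    /// related information with its primary diagnostic and flagging entries
    /// that come from the language's disk-based diagnostic sources.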
1664 pub fn update_diagnostics(
1665 &mut self,
1666 params: lsp::PublishDiagnosticsParams,
1667 disk_based_sources: &HashSet<String>,
1668 cx: &mut ModelContext<Self>,
1669 ) -> Result<()> {
1670 let abs_path = params
1671 .uri
1672 .to_file_path()
1673 .map_err(|_| anyhow!("URI is not a file"))?;
1674 let mut next_group_id = 0;
1675 let mut diagnostics = Vec::default();
1676 let mut primary_diagnostic_group_ids = HashMap::default();
1677 let mut sources_by_group_id = HashMap::default();
1678 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1680 let source = diagnostic.source.as_ref();
1681 let code = diagnostic.code.as_ref().map(|code| match code {
1682 lsp::NumberOrString::Number(code) => code.to_string(),
1683 lsp::NumberOrString::String(code) => code.clone(),
1684 });
1685 let range = range_from_lsp(diagnostic.range);
1686 let is_supporting = diagnostic
1687 .related_information
1688 .as_ref()
1689 .map_or(false, |infos| {
1690 infos.iter().any(|info| {
1691 primary_diagnostic_group_ids.contains_key(&(
1692 source,
1693 code.clone(),
1694 range_from_lsp(info.location.range),
1695 ))
1696 })
1697 });
1698
1699 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1700 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1701 });
1702
1703 if is_supporting {
1704 supporting_diagnostics.insert(
1705 (source, code.clone(), range),
1706 (diagnostic.severity, is_unnecessary),
1707 );
1708 } else {
1709 let group_id = post_inc(&mut next_group_id);
1710 let is_disk_based =
1711 source.map_or(false, |source| disk_based_sources.contains(source));
1712
1713 sources_by_group_id.insert(group_id, source);
1714 primary_diagnostic_group_ids
1715 .insert((source, code.clone(), range.clone()), group_id);
1716
1717 diagnostics.push(DiagnosticEntry {
1718 range,
1719 diagnostic: Diagnostic {
1720 code: code.clone(),
1721 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1722 message: diagnostic.message.clone(),
1723 group_id,
1724 is_primary: true,
1725 is_valid: true,
1726 is_disk_based,
1727 is_unnecessary,
1728 },
1729 });
1730 if let Some(infos) = &diagnostic.related_information {
1731 for info in infos {
1732 if info.location.uri == params.uri && !info.message.is_empty() {
1733 let range = range_from_lsp(info.location.range);
1734 diagnostics.push(DiagnosticEntry {
1735 range,
1736 diagnostic: Diagnostic {
1737 code: code.clone(),
1738 severity: DiagnosticSeverity::INFORMATION,
1739 message: info.message.clone(),
1740 group_id,
1741 is_primary: false,
1742 is_valid: true,
1743 is_disk_based,
1744 is_unnecessary: false,
1745 },
1746 });
1747 }
1748 }
1749 }
1750 }
1751 }
1752
1753 for entry in &mut diagnostics {
1754 let diagnostic = &mut entry.diagnostic;
1755 if !diagnostic.is_primary {
1756 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1757 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1758 source,
1759 diagnostic.code.clone(),
1760 entry.range.clone(),
1761 )) {
1762 if let Some(severity) = severity {
1763 diagnostic.severity = severity;
1764 }
1765 diagnostic.is_unnecessary = is_unnecessary;
1766 }
1767 }
1768 }
1769
1770 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1771 Ok(())
1772 }
1773
1774 pub fn update_diagnostic_entries(
1775 &mut self,
1776 abs_path: PathBuf,
1777 version: Option<i32>,
1778 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1779 cx: &mut ModelContext<Project>,
1780 ) -> Result<(), anyhow::Error> {
1781 let (worktree, relative_path) = self
1782 .find_local_worktree(&abs_path, cx)
1783 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1784 if !worktree.read(cx).is_visible() {
1785 return Ok(());
1786 }
1787
1788 let project_path = ProjectPath {
1789 worktree_id: worktree.read(cx).id(),
1790 path: relative_path.into(),
1791 };
1792
1793 for buffer in self.opened_buffers.values() {
1794 if let Some(buffer) = buffer.upgrade(cx) {
1795 if buffer
1796 .read(cx)
1797 .file()
1798 .map_or(false, |file| *file.path() == project_path.path)
1799 {
1800 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1801 break;
1802 }
1803 }
1804 }
1805 worktree.update(cx, |worktree, cx| {
1806 worktree
1807 .as_local_mut()
1808 .ok_or_else(|| anyhow!("not a local worktree"))?
1809 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1810 })?;
1811 cx.emit(Event::DiagnosticsUpdated(project_path));
1812 Ok(())
1813 }
1814
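    /// Applies a new set of diagnostics to a buffer, translating disk-based
    /// diagnostics' ranges to account for unsaved edits and clipping all
    /// ranges to valid positions in the buffer's current contents.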
1815 fn update_buffer_diagnostics(
1816 &mut self,
1817 buffer: &ModelHandle<Buffer>,
1818 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1819 version: Option<i32>,
1820 cx: &mut ModelContext<Self>,
1821 ) -> Result<()> {
1822 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1823 Ordering::Equal
1824 .then_with(|| b.is_primary.cmp(&a.is_primary))
1825 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1826 .then_with(|| a.severity.cmp(&b.severity))
1827 .then_with(|| a.message.cmp(&b.message))
1828 }
1829
1830 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1831
1832 diagnostics.sort_unstable_by(|a, b| {
1833 Ordering::Equal
1834 .then_with(|| a.range.start.cmp(&b.range.start))
1835 .then_with(|| b.range.end.cmp(&a.range.end))
1836 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1837 });
1838
1839 let mut sanitized_diagnostics = Vec::new();
1840 let mut edits_since_save = snapshot
1841 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1842 .peekable();
1843 let mut last_edit_old_end = PointUtf16::zero();
1844 let mut last_edit_new_end = PointUtf16::zero();
1845 'outer: for entry in diagnostics {
1846 let mut start = entry.range.start;
1847 let mut end = entry.range.end;
1848
1849 // Some diagnostics are based on files on disk instead of buffers'
1850 // current contents. Adjust these diagnostics' ranges to reflect
1851 // any unsaved edits.
1852 if entry.diagnostic.is_disk_based {
1853 while let Some(edit) = edits_since_save.peek() {
1854 if edit.old.end <= start {
1855 last_edit_old_end = edit.old.end;
1856 last_edit_new_end = edit.new.end;
1857 edits_since_save.next();
1858 } else if edit.old.start <= end && edit.old.end >= start {
1859 continue 'outer;
1860 } else {
1861 break;
1862 }
1863 }
1864
1865 let start_overshoot = start - last_edit_old_end;
1866 start = last_edit_new_end;
1867 start += start_overshoot;
1868
1869 let end_overshoot = end - last_edit_old_end;
1870 end = last_edit_new_end;
1871 end += end_overshoot;
1872 }
1873
1874 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1875 ..snapshot.clip_point_utf16(end, Bias::Right);
1876
1877 // Expand empty ranges by one character
1878 if range.start == range.end {
1879 range.end.column += 1;
1880 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1881 if range.start == range.end && range.end.column > 0 {
1882 range.start.column -= 1;
1883 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1884 }
1885 }
1886
1887 sanitized_diagnostics.push(DiagnosticEntry {
1888 range,
1889 diagnostic: entry.diagnostic,
1890 });
1891 }
1892 drop(edits_since_save);
1893
1894 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1895 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1896 Ok(())
1897 }
1898
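    /// Formats the given buffers and returns the resulting transactions keyed
    /// by buffer. Local buffers are formatted via their language server's
    /// document (or range) formatting request; buffers from a remote project
    /// are formatted by the host over RPC.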
1899 pub fn format(
1900 &self,
1901 buffers: HashSet<ModelHandle<Buffer>>,
1902 push_to_history: bool,
1903 cx: &mut ModelContext<Project>,
1904 ) -> Task<Result<ProjectTransaction>> {
1905 let mut local_buffers = Vec::new();
1906 let mut remote_buffers = None;
1907 for buffer_handle in buffers {
1908 let buffer = buffer_handle.read(cx);
1909 if let Some(file) = File::from_dyn(buffer.file()) {
1910 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1911 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1912 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1913 }
1914 } else {
1915 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1916 }
1917 } else {
1918 return Task::ready(Ok(Default::default()));
1919 }
1920 }
1921
1922 let remote_buffers = self.remote_id().zip(remote_buffers);
1923 let client = self.client.clone();
1924
1925 cx.spawn(|this, mut cx| async move {
1926 let mut project_transaction = ProjectTransaction::default();
1927
1928 if let Some((project_id, remote_buffers)) = remote_buffers {
1929 let response = client
1930 .request(proto::FormatBuffers {
1931 project_id,
1932 buffer_ids: remote_buffers
1933 .iter()
1934 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1935 .collect(),
1936 })
1937 .await?
1938 .transaction
1939 .ok_or_else(|| anyhow!("missing transaction"))?;
1940 project_transaction = this
1941 .update(&mut cx, |this, cx| {
1942 this.deserialize_project_transaction(response, push_to_history, cx)
1943 })
1944 .await?;
1945 }
1946
1947 for (buffer, buffer_abs_path, language_server) in local_buffers {
1948 let text_document = lsp::TextDocumentIdentifier::new(
1949 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1950 );
1951 let capabilities = &language_server.capabilities();
1952 let lsp_edits = if capabilities
1953 .document_formatting_provider
1954 .as_ref()
1955 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1956 {
1957 language_server
1958 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1959 text_document,
1960 options: Default::default(),
1961 work_done_progress_params: Default::default(),
1962 })
1963 .await?
1964 } else if capabilities
1965 .document_range_formatting_provider
1966 .as_ref()
1967 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1968 {
1969 let buffer_start = lsp::Position::new(0, 0);
1970 let buffer_end = buffer
1971 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1972 .to_lsp_position();
1973 language_server
1974 .request::<lsp::request::RangeFormatting>(
1975 lsp::DocumentRangeFormattingParams {
1976 text_document,
1977 range: lsp::Range::new(buffer_start, buffer_end),
1978 options: Default::default(),
1979 work_done_progress_params: Default::default(),
1980 },
1981 )
1982 .await?
1983 } else {
1984 continue;
1985 };
1986
1987 if let Some(lsp_edits) = lsp_edits {
1988 let edits = this
1989 .update(&mut cx, |this, cx| {
1990 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1991 })
1992 .await?;
1993 buffer.update(&mut cx, |buffer, cx| {
1994 buffer.finalize_last_transaction();
1995 buffer.start_transaction();
1996 for (range, text) in edits {
1997 buffer.edit([range], text, cx);
1998 }
1999 if buffer.end_transaction(cx).is_some() {
2000 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2001 if !push_to_history {
2002 buffer.forget_transaction(transaction.id);
2003 }
2004 project_transaction.0.insert(cx.handle(), transaction);
2005 }
2006 });
2007 }
2008 }
2009
2010 Ok(project_transaction)
2011 })
2012 }
2013
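/// Returns the locations defining the symbol at `position`, querying the
/// buffer's language server locally or the host over RPC.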
2014 pub fn definition<T: ToPointUtf16>(
2015 &self,
2016 buffer: &ModelHandle<Buffer>,
2017 position: T,
2018 cx: &mut ModelContext<Self>,
2019 ) -> Task<Result<Vec<Location>>> {
2020 let position = position.to_point_utf16(buffer.read(cx));
2021 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2022 }
2023
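/// Returns all references to the symbol at `position` as buffer locations.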
2024 pub fn references<T: ToPointUtf16>(
2025 &self,
2026 buffer: &ModelHandle<Buffer>,
2027 position: T,
2028 cx: &mut ModelContext<Self>,
2029 ) -> Task<Result<Vec<Location>>> {
2030 let position = position.to_point_utf16(buffer.read(cx));
2031 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2032 }
2033
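/// Returns the document highlights (occurrences of the symbol at `position`
/// within the buffer) reported by the language server.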
2034 pub fn document_highlights<T: ToPointUtf16>(
2035 &self,
2036 buffer: &ModelHandle<Buffer>,
2037 position: T,
2038 cx: &mut ModelContext<Self>,
2039 ) -> Task<Result<Vec<DocumentHighlight>>> {
2040 let position = position.to_point_utf16(buffer.read(cx));
2041
2042 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2043 }
2044
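/// Searches for workspace symbols matching `query`.
///
/// Locally this sends one `workspace/symbol` request per distinct language
/// server and maps each result back onto a project worktree and path; on a
/// remote project the query is forwarded to the host.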
2045 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2046 if self.is_local() {
2047 let mut language_servers = HashMap::default();
2048 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2049 if let Some((worktree, language)) = self
2050 .worktree_for_id(*worktree_id, cx)
2051 .and_then(|worktree| worktree.read(cx).as_local())
2052 .zip(self.languages.get_language(language_name))
2053 {
2054 language_servers
2055 .entry(Arc::as_ptr(language_server))
2056 .or_insert((
2057 language_server.clone(),
2058 *worktree_id,
2059 worktree.abs_path().clone(),
2060 language.clone(),
2061 ));
2062 }
2063 }
2064
2065 let mut requests = Vec::new();
2066 for (language_server, _, _, _) in language_servers.values() {
2067 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2068 lsp::WorkspaceSymbolParams {
2069 query: query.to_string(),
2070 ..Default::default()
2071 },
2072 ));
2073 }
2074
2075 cx.spawn_weak(|this, cx| async move {
2076 let responses = futures::future::try_join_all(requests).await?;
2077
2078 let mut symbols = Vec::new();
2079 if let Some(this) = this.upgrade(&cx) {
2080 this.read_with(&cx, |this, cx| {
2081 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2082 language_servers.into_values().zip(responses)
2083 {
2084 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2085 |lsp_symbol| {
2086 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2087 let mut worktree_id = source_worktree_id;
2088 let path;
2089 if let Some((worktree, rel_path)) =
2090 this.find_local_worktree(&abs_path, cx)
2091 {
2092 worktree_id = worktree.read(cx).id();
2093 path = rel_path;
2094 } else {
2095 path = relativize_path(&worktree_abs_path, &abs_path);
2096 }
2097
2098 let label = language
2099 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2100 .unwrap_or_else(|| {
2101 CodeLabel::plain(lsp_symbol.name.clone(), None)
2102 });
2103 let signature = this.symbol_signature(worktree_id, &path);
2104
2105 Some(Symbol {
2106 source_worktree_id,
2107 worktree_id,
2108 language_name: language.name().to_string(),
2109 name: lsp_symbol.name,
2110 kind: lsp_symbol.kind,
2111 label,
2112 path,
2113 range: range_from_lsp(lsp_symbol.location.range),
2114 signature,
2115 })
2116 },
2117 ));
2118 }
2119 })
2120 }
2121
2122 Ok(symbols)
2123 })
2124 } else if let Some(project_id) = self.remote_id() {
2125 let request = self.client.request(proto::GetProjectSymbols {
2126 project_id,
2127 query: query.to_string(),
2128 });
2129 cx.spawn_weak(|this, cx| async move {
2130 let response = request.await?;
2131 let mut symbols = Vec::new();
2132 if let Some(this) = this.upgrade(&cx) {
2133 this.read_with(&cx, |this, _| {
2134 symbols.extend(
2135 response
2136 .symbols
2137 .into_iter()
2138 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2139 );
2140 })
2141 }
2142 Ok(symbols)
2143 })
2144 } else {
2145 Task::ready(Ok(Default::default()))
2146 }
2147 }
2148
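/// Opens the buffer containing the given symbol, using the language server
/// that reported it when the project is local, or asking the host for the
/// buffer when it is remote.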
2149 pub fn open_buffer_for_symbol(
2150 &mut self,
2151 symbol: &Symbol,
2152 cx: &mut ModelContext<Self>,
2153 ) -> Task<Result<ModelHandle<Buffer>>> {
2154 if self.is_local() {
2155 let language_server = if let Some(server) = self.language_servers.get(&(
2156 symbol.source_worktree_id,
2157 Arc::from(symbol.language_name.as_str()),
2158 )) {
2159 server.clone()
2160 } else {
2161 return Task::ready(Err(anyhow!(
2162 "language server for worktree and language not found"
2163 )));
2164 };
2165
2166 let worktree_abs_path = if let Some(worktree_abs_path) = self
2167 .worktree_for_id(symbol.worktree_id, cx)
2168 .and_then(|worktree| worktree.read(cx).as_local())
2169 .map(|local_worktree| local_worktree.abs_path())
2170 {
2171 worktree_abs_path
2172 } else {
2173 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2174 };
2175 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2176 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2177 uri
2178 } else {
2179 return Task::ready(Err(anyhow!("invalid symbol path")));
2180 };
2181
2182 self.open_local_buffer_via_lsp(
2183 symbol_uri,
2184 Arc::from(symbol.language_name.as_str()),
2185 language_server,
2186 cx,
2187 )
2188 } else if let Some(project_id) = self.remote_id() {
2189 let request = self.client.request(proto::OpenBufferForSymbol {
2190 project_id,
2191 symbol: Some(serialize_symbol(symbol)),
2192 });
2193 cx.spawn(|this, mut cx| async move {
2194 let response = request.await?;
2195 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2196 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2197 .await
2198 })
2199 } else {
2200 Task::ready(Err(anyhow!("project does not have a remote id")))
2201 }
2202 }
2203
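/// Requests completions at `position`, returning them with their replacement
/// ranges anchored to the buffer.
///
/// Locally this issues a `textDocument/completion` request and drops any
/// completion whose text edit no longer lines up with the current buffer
/// contents. Remotely, the buffer first catches up to the host's reported
/// version before the completions are deserialized.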
2204 pub fn completions<T: ToPointUtf16>(
2205 &self,
2206 source_buffer_handle: &ModelHandle<Buffer>,
2207 position: T,
2208 cx: &mut ModelContext<Self>,
2209 ) -> Task<Result<Vec<Completion>>> {
2210 let source_buffer_handle = source_buffer_handle.clone();
2211 let source_buffer = source_buffer_handle.read(cx);
2212 let buffer_id = source_buffer.remote_id();
2213 let language = source_buffer.language().cloned();
2214 let worktree;
2215 let buffer_abs_path;
2216 if let Some(file) = File::from_dyn(source_buffer.file()) {
2217 worktree = file.worktree.clone();
2218 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2219 } else {
2220 return Task::ready(Ok(Default::default()));
2221 };
2222
2223 let position = position.to_point_utf16(source_buffer);
2224 let anchor = source_buffer.anchor_after(position);
2225
2226 if worktree.read(cx).as_local().is_some() {
2227 let buffer_abs_path = buffer_abs_path.unwrap();
2228 let lang_server =
2229 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2230 server.clone()
2231 } else {
2232 return Task::ready(Ok(Default::default()));
2233 };
2234
2235 cx.spawn(|_, cx| async move {
2236 let completions = lang_server
2237 .request::<lsp::request::Completion>(lsp::CompletionParams {
2238 text_document_position: lsp::TextDocumentPositionParams::new(
2239 lsp::TextDocumentIdentifier::new(
2240 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2241 ),
2242 position.to_lsp_position(),
2243 ),
2244 context: Default::default(),
2245 work_done_progress_params: Default::default(),
2246 partial_result_params: Default::default(),
2247 })
2248 .await
2249 .context("lsp completion request failed")?;
2250
2251 let completions = if let Some(completions) = completions {
2252 match completions {
2253 lsp::CompletionResponse::Array(completions) => completions,
2254 lsp::CompletionResponse::List(list) => list.items,
2255 }
2256 } else {
2257 Default::default()
2258 };
2259
2260 source_buffer_handle.read_with(&cx, |this, _| {
2261 Ok(completions
2262 .into_iter()
2263 .filter_map(|lsp_completion| {
2264 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2265 lsp::CompletionTextEdit::Edit(edit) => {
2266 (range_from_lsp(edit.range), edit.new_text.clone())
2267 }
2268 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2269 log::info!("unsupported insert/replace completion");
2270 return None;
2271 }
2272 };
2273
2274 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2275 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2276 if clipped_start == old_range.start && clipped_end == old_range.end {
2277 Some(Completion {
2278 old_range: this.anchor_before(old_range.start)
2279 ..this.anchor_after(old_range.end),
2280 new_text,
2281 label: language
2282 .as_ref()
2283 .and_then(|l| l.label_for_completion(&lsp_completion))
2284 .unwrap_or_else(|| {
2285 CodeLabel::plain(
2286 lsp_completion.label.clone(),
2287 lsp_completion.filter_text.as_deref(),
2288 )
2289 }),
2290 lsp_completion,
2291 })
2292 } else {
2293 None
2294 }
2295 })
2296 .collect())
2297 })
2298 })
2299 } else if let Some(project_id) = self.remote_id() {
2300 let rpc = self.client.clone();
2301 let message = proto::GetCompletions {
2302 project_id,
2303 buffer_id,
2304 position: Some(language::proto::serialize_anchor(&anchor)),
2305 version: serialize_version(&source_buffer.version()),
2306 };
2307 cx.spawn_weak(|_, mut cx| async move {
2308 let response = rpc.request(message).await?;
2309
2310 source_buffer_handle
2311 .update(&mut cx, |buffer, _| {
2312 buffer.wait_for_version(deserialize_version(response.version))
2313 })
2314 .await;
2315
2316 response
2317 .completions
2318 .into_iter()
2319 .map(|completion| {
2320 language::proto::deserialize_completion(completion, language.as_ref())
2321 })
2322 .collect()
2323 })
2324 } else {
2325 Task::ready(Ok(Default::default()))
2326 }
2327 }
2328
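/// Resolves the given completion with the language server and applies any
/// additional text edits it carries, returning the resulting transaction (or
/// `None` if nothing changed). The transaction is only pushed to the buffer's
/// undo history when `push_to_history` is true.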
2329 pub fn apply_additional_edits_for_completion(
2330 &self,
2331 buffer_handle: ModelHandle<Buffer>,
2332 completion: Completion,
2333 push_to_history: bool,
2334 cx: &mut ModelContext<Self>,
2335 ) -> Task<Result<Option<Transaction>>> {
2336 let buffer = buffer_handle.read(cx);
2337 let buffer_id = buffer.remote_id();
2338
2339 if self.is_local() {
2340 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2341 server.clone()
2342 } else {
2343 return Task::ready(Ok(Default::default()));
2344 };
2345
2346 cx.spawn(|this, mut cx| async move {
2347 let resolved_completion = lang_server
2348 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2349 .await?;
2350 if let Some(edits) = resolved_completion.additional_text_edits {
2351 let edits = this
2352 .update(&mut cx, |this, cx| {
2353 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2354 })
2355 .await?;
2356 buffer_handle.update(&mut cx, |buffer, cx| {
2357 buffer.finalize_last_transaction();
2358 buffer.start_transaction();
2359 for (range, text) in edits {
2360 buffer.edit([range], text, cx);
2361 }
2362 let transaction = if buffer.end_transaction(cx).is_some() {
2363 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2364 if !push_to_history {
2365 buffer.forget_transaction(transaction.id);
2366 }
2367 Some(transaction)
2368 } else {
2369 None
2370 };
2371 Ok(transaction)
2372 })
2373 } else {
2374 Ok(None)
2375 }
2376 })
2377 } else if let Some(project_id) = self.remote_id() {
2378 let client = self.client.clone();
2379 cx.spawn(|_, mut cx| async move {
2380 let response = client
2381 .request(proto::ApplyCompletionAdditionalEdits {
2382 project_id,
2383 buffer_id,
2384 completion: Some(language::proto::serialize_completion(&completion)),
2385 })
2386 .await?;
2387
2388 if let Some(transaction) = response.transaction {
2389 let transaction = language::proto::deserialize_transaction(transaction)?;
2390 buffer_handle
2391 .update(&mut cx, |buffer, _| {
2392 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2393 })
2394 .await;
2395 if push_to_history {
2396 buffer_handle.update(&mut cx, |buffer, _| {
2397 buffer.push_transaction(transaction.clone(), Instant::now());
2398 });
2399 }
2400 Ok(Some(transaction))
2401 } else {
2402 Ok(None)
2403 }
2404 })
2405 } else {
2406 Task::ready(Err(anyhow!("project does not have a remote id")))
2407 }
2408 }
2409
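/// Fetches the code actions available for `range`, limited to quickfix and
/// refactor kinds, from the buffer's language server or from the host.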
2410 pub fn code_actions<T: ToOffset>(
2411 &self,
2412 buffer_handle: &ModelHandle<Buffer>,
2413 range: Range<T>,
2414 cx: &mut ModelContext<Self>,
2415 ) -> Task<Result<Vec<CodeAction>>> {
2416 let buffer_handle = buffer_handle.clone();
2417 let buffer = buffer_handle.read(cx);
2418 let buffer_id = buffer.remote_id();
2419 let worktree;
2420 let buffer_abs_path;
2421 if let Some(file) = File::from_dyn(buffer.file()) {
2422 worktree = file.worktree.clone();
2423 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2424 } else {
2425 return Task::ready(Ok(Default::default()));
2426 };
2427 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2428
2429 if worktree.read(cx).as_local().is_some() {
2430 let buffer_abs_path = buffer_abs_path.unwrap();
2431 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2432 server.clone()
2433 } else {
2434 return Task::ready(Ok(Default::default()));
2435 };
2436
2437 let lsp_range = lsp::Range::new(
2438 range.start.to_point_utf16(buffer).to_lsp_position(),
2439 range.end.to_point_utf16(buffer).to_lsp_position(),
2440 );
2441 cx.foreground().spawn(async move {
2442 if lang_server.capabilities().code_action_provider.is_none() {
2443 return Ok(Default::default());
2444 }
2445
2446 Ok(lang_server
2447 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2448 text_document: lsp::TextDocumentIdentifier::new(
2449 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2450 ),
2451 range: lsp_range,
2452 work_done_progress_params: Default::default(),
2453 partial_result_params: Default::default(),
2454 context: lsp::CodeActionContext {
2455 diagnostics: Default::default(),
2456 only: Some(vec![
2457 lsp::CodeActionKind::QUICKFIX,
2458 lsp::CodeActionKind::REFACTOR,
2459 lsp::CodeActionKind::REFACTOR_EXTRACT,
2460 ]),
2461 },
2462 })
2463 .await?
2464 .unwrap_or_default()
2465 .into_iter()
2466 .filter_map(|entry| {
2467 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2468 Some(CodeAction {
2469 range: range.clone(),
2470 lsp_action,
2471 })
2472 } else {
2473 None
2474 }
2475 })
2476 .collect())
2477 })
2478 } else if let Some(project_id) = self.remote_id() {
2479 let rpc = self.client.clone();
2480 let version = buffer.version();
2481 cx.spawn_weak(|_, mut cx| async move {
2482 let response = rpc
2483 .request(proto::GetCodeActions {
2484 project_id,
2485 buffer_id,
2486 start: Some(language::proto::serialize_anchor(&range.start)),
2487 end: Some(language::proto::serialize_anchor(&range.end)),
2488 version: serialize_version(&version),
2489 })
2490 .await?;
2491
2492 buffer_handle
2493 .update(&mut cx, |buffer, _| {
2494 buffer.wait_for_version(deserialize_version(response.version))
2495 })
2496 .await;
2497
2498 response
2499 .actions
2500 .into_iter()
2501 .map(language::proto::deserialize_code_action)
2502 .collect()
2503 })
2504 } else {
2505 Task::ready(Ok(Default::default()))
2506 }
2507 }
2508
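/// Applies a code action: the action is re-resolved with the language server
/// (or re-fetched by title when it carries no resolve data), and its workspace
/// edit is then applied across the affected buffers.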
2509 pub fn apply_code_action(
2510 &self,
2511 buffer_handle: ModelHandle<Buffer>,
2512 mut action: CodeAction,
2513 push_to_history: bool,
2514 cx: &mut ModelContext<Self>,
2515 ) -> Task<Result<ProjectTransaction>> {
2516 if self.is_local() {
2517 let buffer = buffer_handle.read(cx);
2518 let lang_name = if let Some(lang) = buffer.language() {
2519 lang.name()
2520 } else {
2521 return Task::ready(Ok(Default::default()));
2522 };
2523 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2524 server.clone()
2525 } else {
2526 return Task::ready(Ok(Default::default()));
2527 };
2528 let range = action.range.to_point_utf16(buffer);
2529
2530 cx.spawn(|this, mut cx| async move {
2531 if let Some(lsp_range) = action
2532 .lsp_action
2533 .data
2534 .as_mut()
2535 .and_then(|d| d.get_mut("codeActionParams"))
2536 .and_then(|d| d.get_mut("range"))
2537 {
2538 *lsp_range = serde_json::to_value(&lsp::Range::new(
2539 range.start.to_lsp_position(),
2540 range.end.to_lsp_position(),
2541 ))
2542 .unwrap();
2543 action.lsp_action = lang_server
2544 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2545 .await?;
2546 } else {
2547 let actions = this
2548 .update(&mut cx, |this, cx| {
2549 this.code_actions(&buffer_handle, action.range, cx)
2550 })
2551 .await?;
2552 action.lsp_action = actions
2553 .into_iter()
2554 .find(|a| a.lsp_action.title == action.lsp_action.title)
2555 .ok_or_else(|| anyhow!("code action is outdated"))?
2556 .lsp_action;
2557 }
2558
2559 if let Some(edit) = action.lsp_action.edit {
2560 Self::deserialize_workspace_edit(
2561 this,
2562 edit,
2563 push_to_history,
2564 lang_name,
2565 lang_server,
2566 &mut cx,
2567 )
2568 .await
2569 } else {
2570 Ok(ProjectTransaction::default())
2571 }
2572 })
2573 } else if let Some(project_id) = self.remote_id() {
2574 let client = self.client.clone();
2575 let request = proto::ApplyCodeAction {
2576 project_id,
2577 buffer_id: buffer_handle.read(cx).remote_id(),
2578 action: Some(language::proto::serialize_code_action(&action)),
2579 };
2580 cx.spawn(|this, mut cx| async move {
2581 let response = client
2582 .request(request)
2583 .await?
2584 .transaction
2585 .ok_or_else(|| anyhow!("missing transaction"))?;
2586 this.update(&mut cx, |this, cx| {
2587 this.deserialize_project_transaction(response, push_to_history, cx)
2588 })
2589 .await
2590 })
2591 } else {
2592 Task::ready(Err(anyhow!("project does not have a remote id")))
2593 }
2594 }
2595
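/// Applies an LSP workspace edit, such as one returned by a code action:
/// resource operations (create/rename/delete) are performed on the filesystem,
/// and text edits are applied to buffers opened through the language server,
/// producing one transaction per edited buffer.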
2596 async fn deserialize_workspace_edit(
2597 this: ModelHandle<Self>,
2598 edit: lsp::WorkspaceEdit,
2599 push_to_history: bool,
2600 language_name: Arc<str>,
2601 language_server: Arc<LanguageServer>,
2602 cx: &mut AsyncAppContext,
2603 ) -> Result<ProjectTransaction> {
2604 let fs = this.read_with(cx, |this, _| this.fs.clone());
2605 let mut operations = Vec::new();
2606 if let Some(document_changes) = edit.document_changes {
2607 match document_changes {
2608 lsp::DocumentChanges::Edits(edits) => {
2609 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2610 }
2611 lsp::DocumentChanges::Operations(ops) => operations = ops,
2612 }
2613 } else if let Some(changes) = edit.changes {
2614 operations.extend(changes.into_iter().map(|(uri, edits)| {
2615 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2616 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2617 uri,
2618 version: None,
2619 },
2620 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2621 })
2622 }));
2623 }
2624
2625 let mut project_transaction = ProjectTransaction::default();
2626 for operation in operations {
2627 match operation {
2628 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2629 let abs_path = op
2630 .uri
2631 .to_file_path()
2632 .map_err(|_| anyhow!("can't convert URI to path"))?;
2633
2634 if let Some(parent_path) = abs_path.parent() {
2635 fs.create_dir(parent_path).await?;
2636 }
2637 if op.uri.as_str().ends_with('/') { // `Path::ends_with` matches whole components, so detect directory URIs via the URI string
2638 fs.create_dir(&abs_path).await?;
2639 } else {
2640 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2641 .await?;
2642 }
2643 }
2644 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2645 let source_abs_path = op
2646 .old_uri
2647 .to_file_path()
2648 .map_err(|_| anyhow!("can't convert URI to path"))?;
2649 let target_abs_path = op
2650 .new_uri
2651 .to_file_path()
2652 .map_err(|_| anyhow!("can't convert URI to path"))?;
2653 fs.rename(
2654 &source_abs_path,
2655 &target_abs_path,
2656 op.options.map(Into::into).unwrap_or_default(),
2657 )
2658 .await?;
2659 }
2660 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2661 let abs_path = op
2662 .uri
2663 .to_file_path()
2664 .map_err(|_| anyhow!("can't convert URI to path"))?;
2665 let options = op.options.map(Into::into).unwrap_or_default();
2666 if op.uri.as_str().ends_with('/') { // as above: detect directory URIs by their trailing slash
2667 fs.remove_dir(&abs_path, options).await?;
2668 } else {
2669 fs.remove_file(&abs_path, options).await?;
2670 }
2671 }
2672 lsp::DocumentChangeOperation::Edit(op) => {
2673 let buffer_to_edit = this
2674 .update(cx, |this, cx| {
2675 this.open_local_buffer_via_lsp(
2676 op.text_document.uri,
2677 language_name.clone(),
2678 language_server.clone(),
2679 cx,
2680 )
2681 })
2682 .await?;
2683
2684 let edits = this
2685 .update(cx, |this, cx| {
2686 let edits = op.edits.into_iter().map(|edit| match edit {
2687 lsp::OneOf::Left(edit) => edit,
2688 lsp::OneOf::Right(edit) => edit.text_edit,
2689 });
2690 this.edits_from_lsp(
2691 &buffer_to_edit,
2692 edits,
2693 op.text_document.version,
2694 cx,
2695 )
2696 })
2697 .await?;
2698
2699 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2700 buffer.finalize_last_transaction();
2701 buffer.start_transaction();
2702 for (range, text) in edits {
2703 buffer.edit([range], text, cx);
2704 }
2705 let transaction = if buffer.end_transaction(cx).is_some() {
2706 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2707 if !push_to_history {
2708 buffer.forget_transaction(transaction.id);
2709 }
2710 Some(transaction)
2711 } else {
2712 None
2713 };
2714
2715 transaction
2716 });
2717 if let Some(transaction) = transaction {
2718 project_transaction.0.insert(buffer_to_edit, transaction);
2719 }
2720 }
2721 }
2722 }
2723
2724 Ok(project_transaction)
2725 }
2726
2727 pub fn prepare_rename<T: ToPointUtf16>(
2728 &self,
2729 buffer: ModelHandle<Buffer>,
2730 position: T,
2731 cx: &mut ModelContext<Self>,
2732 ) -> Task<Result<Option<Range<Anchor>>>> {
2733 let position = position.to_point_utf16(buffer.read(cx));
2734 self.request_lsp(buffer, PrepareRename { position }, cx)
2735 }
2736
2737 pub fn perform_rename<T: ToPointUtf16>(
2738 &self,
2739 buffer: ModelHandle<Buffer>,
2740 position: T,
2741 new_name: String,
2742 push_to_history: bool,
2743 cx: &mut ModelContext<Self>,
2744 ) -> Task<Result<ProjectTransaction>> {
2745 let position = position.to_point_utf16(buffer.read(cx));
2746 self.request_lsp(
2747 buffer,
2748 PerformRename {
2749 position,
2750 new_name,
2751 push_to_history,
2752 },
2753 cx,
2754 )
2755 }
2756
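/// Searches the project for `query`, returning the matching ranges grouped by
/// buffer.
///
/// Locally the search runs in stages: background workers scan the visible
/// files of each worktree to find candidate paths, matching files are opened
/// as buffers (already-open buffers are included directly), and a second pool
/// of workers searches each buffer snapshot for the query. On a remote project
/// the query is forwarded to the host.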
2757 pub fn search(
2758 &self,
2759 query: SearchQuery,
2760 cx: &mut ModelContext<Self>,
2761 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2762 if self.is_local() {
2763 let snapshots = self
2764 .visible_worktrees(cx)
2765 .filter_map(|tree| {
2766 let tree = tree.read(cx).as_local()?;
2767 Some(tree.snapshot())
2768 })
2769 .collect::<Vec<_>>();
2770
2771 let background = cx.background().clone();
2772 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2773 if path_count == 0 {
2774 return Task::ready(Ok(Default::default()));
2775 }
2776 let workers = background.num_cpus().min(path_count);
2777 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2778 cx.background()
2779 .spawn({
2780 let fs = self.fs.clone();
2781 let background = cx.background().clone();
2782 let query = query.clone();
2783 async move {
2784 let fs = &fs;
2785 let query = &query;
2786 let matching_paths_tx = &matching_paths_tx;
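// Partition the project's visible files evenly across the workers; each
// worker scans the half-open range [worker_start_ix, worker_end_ix) of the
// concatenated file lists of all worktree snapshots.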
2787 let paths_per_worker = (path_count + workers - 1) / workers;
2788 let snapshots = &snapshots;
2789 background
2790 .scoped(|scope| {
2791 for worker_ix in 0..workers {
2792 let worker_start_ix = worker_ix * paths_per_worker;
2793 let worker_end_ix = worker_start_ix + paths_per_worker;
2794 scope.spawn(async move {
2795 let mut snapshot_start_ix = 0;
2796 let mut abs_path = PathBuf::new();
2797 for snapshot in snapshots {
2798 let snapshot_end_ix =
2799 snapshot_start_ix + snapshot.visible_file_count();
2800 if worker_end_ix <= snapshot_start_ix {
2801 break;
2802 } else if worker_start_ix > snapshot_end_ix {
2803 snapshot_start_ix = snapshot_end_ix;
2804 continue;
2805 } else {
2806 let start_in_snapshot = worker_start_ix
2807 .saturating_sub(snapshot_start_ix);
2808 let end_in_snapshot =
2809 cmp::min(worker_end_ix, snapshot_end_ix)
2810 - snapshot_start_ix;
2811
2812 for entry in snapshot
2813 .files(false, start_in_snapshot)
2814 .take(end_in_snapshot - start_in_snapshot)
2815 {
2816 if matching_paths_tx.is_closed() {
2817 break;
2818 }
2819
2820 abs_path.clear();
2821 abs_path.push(&snapshot.abs_path());
2822 abs_path.push(&entry.path);
2823 let matches = if let Some(file) =
2824 fs.open_sync(&abs_path).await.log_err()
2825 {
2826 query.detect(file).unwrap_or(false)
2827 } else {
2828 false
2829 };
2830
2831 if matches {
2832 let project_path =
2833 (snapshot.id(), entry.path.clone());
2834 if matching_paths_tx
2835 .send(project_path)
2836 .await
2837 .is_err()
2838 {
2839 break;
2840 }
2841 }
2842 }
2843
2844 snapshot_start_ix = snapshot_end_ix;
2845 }
2846 }
2847 });
2848 }
2849 })
2850 .await;
2851 }
2852 })
2853 .detach();
2854
2855 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2856 let open_buffers = self
2857 .opened_buffers
2858 .values()
2859 .filter_map(|b| b.upgrade(cx))
2860 .collect::<HashSet<_>>();
2861 cx.spawn(|this, cx| async move {
2862 for buffer in &open_buffers {
2863 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2864 buffers_tx.send((buffer.clone(), snapshot)).await?;
2865 }
2866
2867 let open_buffers = Rc::new(RefCell::new(open_buffers));
2868 while let Some(project_path) = matching_paths_rx.next().await {
2869 if buffers_tx.is_closed() {
2870 break;
2871 }
2872
2873 let this = this.clone();
2874 let open_buffers = open_buffers.clone();
2875 let buffers_tx = buffers_tx.clone();
2876 cx.spawn(|mut cx| async move {
2877 if let Some(buffer) = this
2878 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2879 .await
2880 .log_err()
2881 {
2882 if open_buffers.borrow_mut().insert(buffer.clone()) {
2883 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2884 buffers_tx.send((buffer, snapshot)).await?;
2885 }
2886 }
2887
2888 Ok::<_, anyhow::Error>(())
2889 })
2890 .detach();
2891 }
2892
2893 Ok::<_, anyhow::Error>(())
2894 })
2895 .detach_and_log_err(cx);
2896
2897 let background = cx.background().clone();
2898 cx.background().spawn(async move {
2899 let query = &query;
2900 let mut matched_buffers = Vec::new();
2901 for _ in 0..workers {
2902 matched_buffers.push(HashMap::default());
2903 }
2904 background
2905 .scoped(|scope| {
2906 for worker_matched_buffers in matched_buffers.iter_mut() {
2907 let mut buffers_rx = buffers_rx.clone();
2908 scope.spawn(async move {
2909 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2910 let buffer_matches = query
2911 .search(snapshot.as_rope())
2912 .await
2913 .iter()
2914 .map(|range| {
2915 snapshot.anchor_before(range.start)
2916 ..snapshot.anchor_after(range.end)
2917 })
2918 .collect::<Vec<_>>();
2919 if !buffer_matches.is_empty() {
2920 worker_matched_buffers
2921 .insert(buffer.clone(), buffer_matches);
2922 }
2923 }
2924 });
2925 }
2926 })
2927 .await;
2928 Ok(matched_buffers.into_iter().flatten().collect())
2929 })
2930 } else if let Some(project_id) = self.remote_id() {
2931 let request = self.client.request(query.to_proto(project_id));
2932 cx.spawn(|this, mut cx| async move {
2933 let response = request.await?;
2934 let mut result = HashMap::default();
2935 for location in response.locations {
2936 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2937 let target_buffer = this
2938 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2939 .await?;
2940 let start = location
2941 .start
2942 .and_then(deserialize_anchor)
2943 .ok_or_else(|| anyhow!("missing target start"))?;
2944 let end = location
2945 .end
2946 .and_then(deserialize_anchor)
2947 .ok_or_else(|| anyhow!("missing target end"))?;
2948 result
2949 .entry(target_buffer)
2950 .or_insert(Vec::new())
2951 .push(start..end)
2952 }
2953 Ok(result)
2954 })
2955 } else {
2956 Task::ready(Ok(Default::default()))
2957 }
2958 }
2959
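/// Dispatches a typed LSP command for the given buffer: locally the request is
/// sent to the buffer's language server (after checking that the server's
/// capabilities support it), while on a remote project it is forwarded to the
/// host as the command's proto request.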
2960 fn request_lsp<R: LspCommand>(
2961 &self,
2962 buffer_handle: ModelHandle<Buffer>,
2963 request: R,
2964 cx: &mut ModelContext<Self>,
2965 ) -> Task<Result<R::Response>>
2966 where
2967 <R::LspRequest as lsp::request::Request>::Result: Send,
2968 {
2969 let buffer = buffer_handle.read(cx);
2970 if self.is_local() {
2971 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2972 if let Some((file, language_server)) =
2973 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2974 {
2975 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2976 return cx.spawn(|this, cx| async move {
2977 if !request.check_capabilities(&language_server.capabilities()) {
2978 return Ok(Default::default());
2979 }
2980
2981 let response = language_server
2982 .request::<R::LspRequest>(lsp_params)
2983 .await
2984 .context("lsp request failed")?;
2985 request
2986 .response_from_lsp(response, this, buffer_handle, cx)
2987 .await
2988 });
2989 }
2990 } else if let Some(project_id) = self.remote_id() {
2991 let rpc = self.client.clone();
2992 let message = request.to_proto(project_id, buffer);
2993 return cx.spawn(|this, cx| async move {
2994 let response = rpc.request(message).await?;
2995 request
2996 .response_from_proto(response, this, buffer_handle, cx)
2997 .await
2998 });
2999 }
3000 Task::ready(Ok(Default::default()))
3001 }
3002
3003 pub fn find_or_create_local_worktree(
3004 &mut self,
3005 abs_path: impl AsRef<Path>,
3006 visible: bool,
3007 cx: &mut ModelContext<Self>,
3008 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3009 let abs_path = abs_path.as_ref();
3010 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3011 Task::ready(Ok((tree.clone(), relative_path.into())))
3012 } else {
3013 let worktree = self.create_local_worktree(abs_path, visible, cx);
3014 cx.foreground()
3015 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3016 }
3017 }
3018
3019 pub fn find_local_worktree(
3020 &self,
3021 abs_path: &Path,
3022 cx: &AppContext,
3023 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3024 for tree in self.worktrees(cx) {
3025 if let Some(relative_path) = tree
3026 .read(cx)
3027 .as_local()
3028 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3029 {
3030 return Some((tree.clone(), relative_path.into()));
3031 }
3032 }
3033 None
3034 }
3035
3036 pub fn is_shared(&self) -> bool {
3037 match &self.client_state {
3038 ProjectClientState::Local { is_shared, .. } => *is_shared,
3039 ProjectClientState::Remote { .. } => false,
3040 }
3041 }
3042
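/// Creates a local worktree rooted at `abs_path`, or joins an in-flight load
/// of the same path. The shared loading task is memoized in
/// `loading_local_worktrees`, and the new worktree is registered (or shared)
/// with the server when the project already has a remote id.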
3043 fn create_local_worktree(
3044 &mut self,
3045 abs_path: impl AsRef<Path>,
3046 visible: bool,
3047 cx: &mut ModelContext<Self>,
3048 ) -> Task<Result<ModelHandle<Worktree>>> {
3049 let fs = self.fs.clone();
3050 let client = self.client.clone();
3051 let next_entry_id = self.next_entry_id.clone();
3052 let path: Arc<Path> = abs_path.as_ref().into();
3053 let task = self
3054 .loading_local_worktrees
3055 .entry(path.clone())
3056 .or_insert_with(|| {
3057 cx.spawn(|project, mut cx| {
3058 async move {
3059 let worktree = Worktree::local(
3060 client.clone(),
3061 path.clone(),
3062 visible,
3063 fs,
3064 next_entry_id,
3065 &mut cx,
3066 )
3067 .await;
3068 project.update(&mut cx, |project, _| {
3069 project.loading_local_worktrees.remove(&path);
3070 });
3071 let worktree = worktree?;
3072
3073 let (remote_project_id, is_shared) =
3074 project.update(&mut cx, |project, cx| {
3075 project.add_worktree(&worktree, cx);
3076 (project.remote_id(), project.is_shared())
3077 });
3078
3079 if let Some(project_id) = remote_project_id {
3080 if is_shared {
3081 worktree
3082 .update(&mut cx, |worktree, cx| {
3083 worktree.as_local_mut().unwrap().share(project_id, cx)
3084 })
3085 .await?;
3086 } else {
3087 worktree
3088 .update(&mut cx, |worktree, cx| {
3089 worktree.as_local_mut().unwrap().register(project_id, cx)
3090 })
3091 .await?;
3092 }
3093 }
3094
3095 Ok(worktree)
3096 }
3097 .map_err(Arc::new)
3098 })
3099 .shared()
3100 })
3101 .clone();
3102 cx.foreground().spawn(async move {
3103 match task.await {
3104 Ok(worktree) => Ok(worktree),
3105 Err(err) => Err(anyhow!("{}", err)),
3106 }
3107 })
3108 }
3109
3110 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3111 self.worktrees.retain(|worktree| {
3112 worktree
3113 .upgrade(cx)
3114 .map_or(false, |w| w.read(cx).id() != id)
3115 });
3116 cx.notify();
3117 }
3118
3119 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3120 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3121 if worktree.read(cx).is_local() {
3122 cx.subscribe(&worktree, |this, worktree, _, cx| {
3123 this.update_local_worktree_buffers(worktree, cx);
3124 })
3125 .detach();
3126 }
3127
3128 let push_strong_handle = {
3129 let worktree = worktree.read(cx);
3130 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3131 };
3132 if push_strong_handle {
3133 self.worktrees
3134 .push(WorktreeHandle::Strong(worktree.clone()));
3135 } else {
3136 cx.observe_release(&worktree, |this, _, cx| {
3137 this.worktrees
3138 .retain(|worktree| worktree.upgrade(cx).is_some());
3139 cx.notify();
3140 })
3141 .detach();
3142 self.worktrees
3143 .push(WorktreeHandle::Weak(worktree.downgrade()));
3144 }
3145 cx.notify();
3146 }
3147
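/// Keeps open buffers' file metadata in sync after a local worktree changes:
/// every buffer backed by the worktree gets a freshly resolved `File` (falling
/// back to an entry-less file when its entry has disappeared), and the update
/// is sent to collaborators when the project has a remote id.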
3148 fn update_local_worktree_buffers(
3149 &mut self,
3150 worktree_handle: ModelHandle<Worktree>,
3151 cx: &mut ModelContext<Self>,
3152 ) {
3153 let snapshot = worktree_handle.read(cx).snapshot();
3154 let mut buffers_to_delete = Vec::new();
3155 for (buffer_id, buffer) in &self.opened_buffers {
3156 if let Some(buffer) = buffer.upgrade(cx) {
3157 buffer.update(cx, |buffer, cx| {
3158 if let Some(old_file) = File::from_dyn(buffer.file()) {
3159 if old_file.worktree != worktree_handle {
3160 return;
3161 }
3162
3163 let new_file = if let Some(entry) = old_file
3164 .entry_id
3165 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3166 {
3167 File {
3168 is_local: true,
3169 entry_id: Some(entry.id),
3170 mtime: entry.mtime,
3171 path: entry.path.clone(),
3172 worktree: worktree_handle.clone(),
3173 }
3174 } else if let Some(entry) =
3175 snapshot.entry_for_path(old_file.path().as_ref())
3176 {
3177 File {
3178 is_local: true,
3179 entry_id: Some(entry.id),
3180 mtime: entry.mtime,
3181 path: entry.path.clone(),
3182 worktree: worktree_handle.clone(),
3183 }
3184 } else {
3185 File {
3186 is_local: true,
3187 entry_id: None,
3188 path: old_file.path().clone(),
3189 mtime: old_file.mtime(),
3190 worktree: worktree_handle.clone(),
3191 }
3192 };
3193
3194 if let Some(project_id) = self.remote_id() {
3195 self.client
3196 .send(proto::UpdateBufferFile {
3197 project_id,
3198 buffer_id: *buffer_id as u64,
3199 file: Some(new_file.to_proto()),
3200 })
3201 .log_err();
3202 }
3203 buffer.file_updated(Box::new(new_file), cx).detach();
3204 }
3205 });
3206 } else {
3207 buffers_to_delete.push(*buffer_id);
3208 }
3209 }
3210
3211 for buffer_id in buffers_to_delete {
3212 self.opened_buffers.remove(&buffer_id);
3213 }
3214 }
3215
3216 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3217 let new_active_entry = entry.and_then(|project_path| {
3218 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3219 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3220 Some(entry.id)
3221 });
3222 if new_active_entry != self.active_entry {
3223 self.active_entry = new_active_entry;
3224 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3225 }
3226 }
3227
3228 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3229 self.language_servers_with_diagnostics_running > 0
3230 }
3231
3232 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3233 let mut summary = DiagnosticSummary::default();
3234 for (_, path_summary) in self.diagnostic_summaries(cx) {
3235 summary.error_count += path_summary.error_count;
3236 summary.warning_count += path_summary.warning_count;
3237 summary.info_count += path_summary.info_count;
3238 summary.hint_count += path_summary.hint_count;
3239 }
3240 summary
3241 }
3242
3243 pub fn diagnostic_summaries<'a>(
3244 &'a self,
3245 cx: &'a AppContext,
3246 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3247 self.worktrees(cx).flat_map(move |worktree| {
3248 let worktree = worktree.read(cx);
3249 let worktree_id = worktree.id();
3250 worktree
3251 .diagnostic_summaries()
3252 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3253 })
3254 }
3255
3256 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3257 self.language_servers_with_diagnostics_running += 1;
3258 if self.language_servers_with_diagnostics_running == 1 {
3259 cx.emit(Event::DiskBasedDiagnosticsStarted);
3260 }
3261 }
3262
3263 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3264 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3265 self.language_servers_with_diagnostics_running -= 1;
3266 if self.language_servers_with_diagnostics_running == 0 {
3267 cx.emit(Event::DiskBasedDiagnosticsFinished);
3268 }
3269 }
3270
3271 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3272 self.active_entry
3273 }
3274
3275 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3276 self.worktree_for_id(path.worktree_id, cx)?
3277 .read(cx)
3278 .entry_for_path(&path.path)
3279 .map(|entry| entry.id)
3280 }
3281
3282 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3283 let worktree = self.worktree_for_entry(entry_id, cx)?;
3284 let worktree = worktree.read(cx);
3285 let worktree_id = worktree.id();
3286 let path = worktree.entry_for_id(entry_id)?.path.clone();
3287 Some(ProjectPath { worktree_id, path })
3288 }
3289
3290 // RPC message handlers
3291
3292 async fn handle_unshare_project(
3293 this: ModelHandle<Self>,
3294 _: TypedEnvelope<proto::UnshareProject>,
3295 _: Arc<Client>,
3296 mut cx: AsyncAppContext,
3297 ) -> Result<()> {
3298 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3299 Ok(())
3300 }
3301
3302 async fn handle_add_collaborator(
3303 this: ModelHandle<Self>,
3304 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3305 _: Arc<Client>,
3306 mut cx: AsyncAppContext,
3307 ) -> Result<()> {
3308 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3309 let collaborator = envelope
3310 .payload
3311 .collaborator
3312 .take()
3313 .ok_or_else(|| anyhow!("empty collaborator"))?;
3314
3315 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3316 this.update(&mut cx, |this, cx| {
3317 this.collaborators
3318 .insert(collaborator.peer_id, collaborator);
3319 cx.notify();
3320 });
3321
3322 Ok(())
3323 }
3324
3325 async fn handle_remove_collaborator(
3326 this: ModelHandle<Self>,
3327 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3328 _: Arc<Client>,
3329 mut cx: AsyncAppContext,
3330 ) -> Result<()> {
3331 this.update(&mut cx, |this, cx| {
3332 let peer_id = PeerId(envelope.payload.peer_id);
3333 let replica_id = this
3334 .collaborators
3335 .remove(&peer_id)
3336 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3337 .replica_id;
3338 for (_, buffer) in &this.opened_buffers {
3339 if let Some(buffer) = buffer.upgrade(cx) {
3340 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3341 }
3342 }
3343 cx.notify();
3344 Ok(())
3345 })
3346 }
3347
3348 async fn handle_register_worktree(
3349 this: ModelHandle<Self>,
3350 envelope: TypedEnvelope<proto::RegisterWorktree>,
3351 client: Arc<Client>,
3352 mut cx: AsyncAppContext,
3353 ) -> Result<()> {
3354 this.update(&mut cx, |this, cx| {
3355 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3356 let replica_id = this.replica_id();
3357 let worktree = proto::Worktree {
3358 id: envelope.payload.worktree_id,
3359 root_name: envelope.payload.root_name,
3360 entries: Default::default(),
3361 diagnostic_summaries: Default::default(),
3362 visible: envelope.payload.visible,
3363 };
3364 let (worktree, load_task) =
3365 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3366 this.add_worktree(&worktree, cx);
3367 load_task.detach();
3368 Ok(())
3369 })
3370 }
3371
3372 async fn handle_unregister_worktree(
3373 this: ModelHandle<Self>,
3374 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3375 _: Arc<Client>,
3376 mut cx: AsyncAppContext,
3377 ) -> Result<()> {
3378 this.update(&mut cx, |this, cx| {
3379 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3380 this.remove_worktree(worktree_id, cx);
3381 Ok(())
3382 })
3383 }
3384
3385 async fn handle_update_worktree(
3386 this: ModelHandle<Self>,
3387 envelope: TypedEnvelope<proto::UpdateWorktree>,
3388 _: Arc<Client>,
3389 mut cx: AsyncAppContext,
3390 ) -> Result<()> {
3391 this.update(&mut cx, |this, cx| {
3392 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3393 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3394 worktree.update(cx, |worktree, _| {
3395 let worktree = worktree.as_remote_mut().unwrap();
3396 worktree.update_from_remote(envelope)
3397 })?;
3398 }
3399 Ok(())
3400 })
3401 }
3402
3403 async fn handle_update_diagnostic_summary(
3404 this: ModelHandle<Self>,
3405 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3406 _: Arc<Client>,
3407 mut cx: AsyncAppContext,
3408 ) -> Result<()> {
3409 this.update(&mut cx, |this, cx| {
3410 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3411 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3412 if let Some(summary) = envelope.payload.summary {
3413 let project_path = ProjectPath {
3414 worktree_id,
3415 path: Path::new(&summary.path).into(),
3416 };
3417 worktree.update(cx, |worktree, _| {
3418 worktree
3419 .as_remote_mut()
3420 .unwrap()
3421 .update_diagnostic_summary(project_path.path.clone(), &summary);
3422 });
3423 cx.emit(Event::DiagnosticsUpdated(project_path));
3424 }
3425 }
3426 Ok(())
3427 })
3428 }
3429
3430 async fn handle_start_language_server(
3431 this: ModelHandle<Self>,
3432 envelope: TypedEnvelope<proto::StartLanguageServer>,
3433 _: Arc<Client>,
3434 mut cx: AsyncAppContext,
3435 ) -> Result<()> {
3436 let server = envelope
3437 .payload
3438 .server
3439 .ok_or_else(|| anyhow!("invalid server"))?;
3440 this.update(&mut cx, |this, cx| {
3441 this.language_server_statuses.insert(
3442 server.id as usize,
3443 LanguageServerStatus {
3444 name: server.name,
3445 pending_work: Default::default(),
3446 pending_diagnostic_updates: 0,
3447 },
3448 );
3449 cx.notify();
3450 });
3451 Ok(())
3452 }
3453
3454 async fn handle_update_language_server(
3455 this: ModelHandle<Self>,
3456 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3457 _: Arc<Client>,
3458 mut cx: AsyncAppContext,
3459 ) -> Result<()> {
3460 let language_server_id = envelope.payload.language_server_id as usize;
3461 match envelope
3462 .payload
3463 .variant
3464 .ok_or_else(|| anyhow!("invalid variant"))?
3465 {
3466 proto::update_language_server::Variant::WorkStart(payload) => {
3467 this.update(&mut cx, |this, cx| {
3468 this.on_lsp_work_start(language_server_id, payload.token, cx);
3469 })
3470 }
3471 proto::update_language_server::Variant::WorkProgress(payload) => {
3472 this.update(&mut cx, |this, cx| {
3473 this.on_lsp_work_progress(
3474 language_server_id,
3475 payload.token,
3476 LanguageServerProgress {
3477 message: payload.message,
3478 percentage: payload.percentage.map(|p| p as usize),
3479 last_update_at: Instant::now(),
3480 },
3481 cx,
3482 );
3483 })
3484 }
3485 proto::update_language_server::Variant::WorkEnd(payload) => {
3486 this.update(&mut cx, |this, cx| {
3487 this.on_lsp_work_end(language_server_id, payload.token, cx);
3488 })
3489 }
3490 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3491 this.update(&mut cx, |this, cx| {
3492 this.disk_based_diagnostics_started(cx);
3493 })
3494 }
3495 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3496 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3497 }
3498 }
3499
3500 Ok(())
3501 }
3502
3503 async fn handle_update_buffer(
3504 this: ModelHandle<Self>,
3505 envelope: TypedEnvelope<proto::UpdateBuffer>,
3506 _: Arc<Client>,
3507 mut cx: AsyncAppContext,
3508 ) -> Result<()> {
3509 this.update(&mut cx, |this, cx| {
3510 let payload = envelope.payload.clone();
3511 let buffer_id = payload.buffer_id;
3512 let ops = payload
3513 .operations
3514 .into_iter()
3515 .map(|op| language::proto::deserialize_operation(op))
3516 .collect::<Result<Vec<_>, _>>()?;
3517 match this.opened_buffers.entry(buffer_id) {
3518 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3519 OpenBuffer::Strong(buffer) => {
3520 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3521 }
3522 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3523 OpenBuffer::Weak(_) => {}
3524 },
3525 hash_map::Entry::Vacant(e) => {
3526 e.insert(OpenBuffer::Loading(ops));
3527 }
3528 }
3529 Ok(())
3530 })
3531 }
3532
3533 async fn handle_update_buffer_file(
3534 this: ModelHandle<Self>,
3535 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3536 _: Arc<Client>,
3537 mut cx: AsyncAppContext,
3538 ) -> Result<()> {
3539 this.update(&mut cx, |this, cx| {
3540 let payload = envelope.payload.clone();
3541 let buffer_id = payload.buffer_id;
3542 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3543 let worktree = this
3544 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3545 .ok_or_else(|| anyhow!("no such worktree"))?;
3546 let file = File::from_proto(file, worktree.clone(), cx)?;
3547 let buffer = this
3548 .opened_buffers
3549 .get_mut(&buffer_id)
3550 .and_then(|b| b.upgrade(cx))
3551 .ok_or_else(|| anyhow!("no such buffer"))?;
3552 buffer.update(cx, |buffer, cx| {
3553 buffer.file_updated(Box::new(file), cx).detach();
3554 });
3555 Ok(())
3556 })
3557 }
3558
3559 async fn handle_save_buffer(
3560 this: ModelHandle<Self>,
3561 envelope: TypedEnvelope<proto::SaveBuffer>,
3562 _: Arc<Client>,
3563 mut cx: AsyncAppContext,
3564 ) -> Result<proto::BufferSaved> {
3565 let buffer_id = envelope.payload.buffer_id;
3566 let requested_version = deserialize_version(envelope.payload.version);
3567
3568 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3569 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3570 let buffer = this
3571 .opened_buffers
3572 .get(&buffer_id)
3573 .map(|buffer| buffer.upgrade(cx).unwrap())
3574 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3575 Ok::<_, anyhow::Error>((project_id, buffer))
3576 })?;
3577 buffer
3578 .update(&mut cx, |buffer, _| {
3579 buffer.wait_for_version(requested_version)
3580 })
3581 .await;
3582
3583 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3584 Ok(proto::BufferSaved {
3585 project_id,
3586 buffer_id,
3587 version: serialize_version(&saved_version),
3588 mtime: Some(mtime.into()),
3589 })
3590 }
3591
3592 async fn handle_format_buffers(
3593 this: ModelHandle<Self>,
3594 envelope: TypedEnvelope<proto::FormatBuffers>,
3595 _: Arc<Client>,
3596 mut cx: AsyncAppContext,
3597 ) -> Result<proto::FormatBuffersResponse> {
3598 let sender_id = envelope.original_sender_id()?;
3599 let format = this.update(&mut cx, |this, cx| {
3600 let mut buffers = HashSet::default();
3601 for buffer_id in &envelope.payload.buffer_ids {
3602 buffers.insert(
3603 this.opened_buffers
3604 .get(buffer_id)
3605 .map(|buffer| buffer.upgrade(cx).unwrap())
3606 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3607 );
3608 }
3609 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3610 })?;
3611
3612 let project_transaction = format.await?;
3613 let project_transaction = this.update(&mut cx, |this, cx| {
3614 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3615 });
3616 Ok(proto::FormatBuffersResponse {
3617 transaction: Some(project_transaction),
3618 })
3619 }
3620
3621 async fn handle_get_completions(
3622 this: ModelHandle<Self>,
3623 envelope: TypedEnvelope<proto::GetCompletions>,
3624 _: Arc<Client>,
3625 mut cx: AsyncAppContext,
3626 ) -> Result<proto::GetCompletionsResponse> {
3627 let position = envelope
3628 .payload
3629 .position
3630 .and_then(language::proto::deserialize_anchor)
3631 .ok_or_else(|| anyhow!("invalid position"))?;
3632 let version = deserialize_version(envelope.payload.version);
3633 let buffer = this.read_with(&cx, |this, cx| {
3634 this.opened_buffers
3635 .get(&envelope.payload.buffer_id)
3636 .map(|buffer| buffer.upgrade(cx).unwrap())
3637 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3638 })?;
3639 buffer
3640 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3641 .await;
3642 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3643 let completions = this
3644 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3645 .await?;
3646
3647 Ok(proto::GetCompletionsResponse {
3648 completions: completions
3649 .iter()
3650 .map(language::proto::serialize_completion)
3651 .collect(),
3652 version: serialize_version(&version),
3653 })
3654 }
3655
3656 async fn handle_apply_additional_edits_for_completion(
3657 this: ModelHandle<Self>,
3658 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3659 _: Arc<Client>,
3660 mut cx: AsyncAppContext,
3661 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3662 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3663 let buffer = this
3664 .opened_buffers
3665 .get(&envelope.payload.buffer_id)
3666 .map(|buffer| buffer.upgrade(cx).unwrap())
3667 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3668 let language = buffer.read(cx).language();
3669 let completion = language::proto::deserialize_completion(
3670 envelope
3671 .payload
3672 .completion
3673 .ok_or_else(|| anyhow!("invalid completion"))?,
3674 language,
3675 )?;
3676 Ok::<_, anyhow::Error>(
3677 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3678 )
3679 })?;
3680
3681 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3682 transaction: apply_additional_edits
3683 .await?
3684 .as_ref()
3685 .map(language::proto::serialize_transaction),
3686 })
3687 }
3688
3689 async fn handle_get_code_actions(
3690 this: ModelHandle<Self>,
3691 envelope: TypedEnvelope<proto::GetCodeActions>,
3692 _: Arc<Client>,
3693 mut cx: AsyncAppContext,
3694 ) -> Result<proto::GetCodeActionsResponse> {
3695 let start = envelope
3696 .payload
3697 .start
3698 .and_then(language::proto::deserialize_anchor)
3699 .ok_or_else(|| anyhow!("invalid start"))?;
3700 let end = envelope
3701 .payload
3702 .end
3703 .and_then(language::proto::deserialize_anchor)
3704 .ok_or_else(|| anyhow!("invalid end"))?;
3705 let buffer = this.update(&mut cx, |this, cx| {
3706 this.opened_buffers
3707 .get(&envelope.payload.buffer_id)
3708 .map(|buffer| buffer.upgrade(cx).unwrap())
3709 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3710 })?;
3711 buffer
3712 .update(&mut cx, |buffer, _| {
3713 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3714 })
3715 .await;
3716
3717 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3718 let code_actions = this.update(&mut cx, |this, cx| {
3719 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3720 })?;
3721
3722 Ok(proto::GetCodeActionsResponse {
3723 actions: code_actions
3724 .await?
3725 .iter()
3726 .map(language::proto::serialize_code_action)
3727 .collect(),
3728 version: serialize_version(&version),
3729 })
3730 }
3731
3732 async fn handle_apply_code_action(
3733 this: ModelHandle<Self>,
3734 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3735 _: Arc<Client>,
3736 mut cx: AsyncAppContext,
3737 ) -> Result<proto::ApplyCodeActionResponse> {
3738 let sender_id = envelope.original_sender_id()?;
3739 let action = language::proto::deserialize_code_action(
3740 envelope
3741 .payload
3742 .action
3743 .ok_or_else(|| anyhow!("invalid action"))?,
3744 )?;
3745 let apply_code_action = this.update(&mut cx, |this, cx| {
3746 let buffer = this
3747 .opened_buffers
3748 .get(&envelope.payload.buffer_id)
3749 .map(|buffer| buffer.upgrade(cx).unwrap())
3750 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3751 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3752 })?;
3753
3754 let project_transaction = apply_code_action.await?;
3755 let project_transaction = this.update(&mut cx, |this, cx| {
3756 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3757 });
3758 Ok(proto::ApplyCodeActionResponse {
3759 transaction: Some(project_transaction),
3760 })
3761 }
3762
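/// Generic handler for proto requests that wrap an LSP command: it looks up
/// the referenced buffer, reconstructs the typed request from the payload,
/// runs it through `request_lsp`, and converts the response back to proto for
/// the requesting peer.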
3763 async fn handle_lsp_command<T: LspCommand>(
3764 this: ModelHandle<Self>,
3765 envelope: TypedEnvelope<T::ProtoRequest>,
3766 _: Arc<Client>,
3767 mut cx: AsyncAppContext,
3768 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3769 where
3770 <T::LspRequest as lsp::request::Request>::Result: Send,
3771 {
3772 let sender_id = envelope.original_sender_id()?;
3773 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3774 let buffer_handle = this.read_with(&cx, |this, _| {
3775 this.opened_buffers
3776 .get(&buffer_id)
3777 .and_then(|buffer| buffer.upgrade(&cx))
3778 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3779 })?;
3780 let request = T::from_proto(
3781 envelope.payload,
3782 this.clone(),
3783 buffer_handle.clone(),
3784 cx.clone(),
3785 )
3786 .await?;
3787 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3788 let response = this
3789 .update(&mut cx, |this, cx| {
3790 this.request_lsp(buffer_handle, request, cx)
3791 })
3792 .await?;
3793 this.update(&mut cx, |this, cx| {
3794 Ok(T::response_to_proto(
3795 response,
3796 this,
3797 sender_id,
3798 &buffer_version,
3799 cx,
3800 ))
3801 })
3802 }
3803
3804 async fn handle_get_project_symbols(
3805 this: ModelHandle<Self>,
3806 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3807 _: Arc<Client>,
3808 mut cx: AsyncAppContext,
3809 ) -> Result<proto::GetProjectSymbolsResponse> {
3810 let symbols = this
3811 .update(&mut cx, |this, cx| {
3812 this.symbols(&envelope.payload.query, cx)
3813 })
3814 .await?;
3815
3816 Ok(proto::GetProjectSymbolsResponse {
3817 symbols: symbols.iter().map(serialize_symbol).collect(),
3818 })
3819 }
3820
3821 async fn handle_search_project(
3822 this: ModelHandle<Self>,
3823 envelope: TypedEnvelope<proto::SearchProject>,
3824 _: Arc<Client>,
3825 mut cx: AsyncAppContext,
3826 ) -> Result<proto::SearchProjectResponse> {
3827 let peer_id = envelope.original_sender_id()?;
3828 let query = SearchQuery::from_proto(envelope.payload)?;
3829 let result = this
3830 .update(&mut cx, |this, cx| this.search(query, cx))
3831 .await?;
3832
3833 this.update(&mut cx, |this, cx| {
3834 let mut locations = Vec::new();
3835 for (buffer, ranges) in result {
3836 for range in ranges {
3837 let start = serialize_anchor(&range.start);
3838 let end = serialize_anchor(&range.end);
3839 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3840 locations.push(proto::Location {
3841 buffer: Some(buffer),
3842 start: Some(start),
3843 end: Some(end),
3844 });
3845 }
3846 }
3847 Ok(proto::SearchProjectResponse { locations })
3848 })
3849 }
3850
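    /// Handles a peer's request to open the buffer backing a project symbol. The
    /// symbol's signature is re-derived and verified so that peers can only open
    /// buffers for symbols this project previously sent them.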
3851 async fn handle_open_buffer_for_symbol(
3852 this: ModelHandle<Self>,
3853 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3854 _: Arc<Client>,
3855 mut cx: AsyncAppContext,
3856 ) -> Result<proto::OpenBufferForSymbolResponse> {
3857 let peer_id = envelope.original_sender_id()?;
3858 let symbol = envelope
3859 .payload
3860 .symbol
3861 .ok_or_else(|| anyhow!("invalid symbol"))?;
3862 let symbol = this.read_with(&cx, |this, _| {
3863 let symbol = this.deserialize_symbol(symbol)?;
3864 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3865 if signature == symbol.signature {
3866 Ok(symbol)
3867 } else {
3868 Err(anyhow!("invalid symbol signature"))
3869 }
3870 })?;
3871 let buffer = this
3872 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3873 .await?;
3874
3875 Ok(proto::OpenBufferForSymbolResponse {
3876 buffer: Some(this.update(&mut cx, |this, cx| {
3877 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3878 })),
3879 })
3880 }
3881
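    /// Computes a signature over a symbol's worktree id and path, mixing in this
    /// project's nonce. `handle_open_buffer_for_symbol` recomputes it to verify
    /// that a symbol sent back by a peer originated from this project.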
3882 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3883 let mut hasher = Sha256::new();
3884 hasher.update(worktree_id.to_proto().to_be_bytes());
3885 hasher.update(path.to_string_lossy().as_bytes());
3886 hasher.update(self.nonce.to_be_bytes());
3887 hasher.finalize().as_slice().try_into().unwrap()
3888 }
3889
3890 async fn handle_open_buffer_by_path(
3891 this: ModelHandle<Self>,
3892 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3893 _: Arc<Client>,
3894 mut cx: AsyncAppContext,
3895 ) -> Result<proto::OpenBufferResponse> {
3896 let peer_id = envelope.original_sender_id()?;
3897 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3898 let open_buffer = this.update(&mut cx, |this, cx| {
3899 this.open_buffer(
3900 ProjectPath {
3901 worktree_id,
3902 path: PathBuf::from(envelope.payload.path).into(),
3903 },
3904 cx,
3905 )
3906 });
3907
3908 let buffer = open_buffer.await?;
3909 this.update(&mut cx, |this, cx| {
3910 Ok(proto::OpenBufferResponse {
3911 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3912 })
3913 })
3914 }
3915
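    /// Converts a `ProjectTransaction` into its protobuf form, serializing every
    /// involved buffer for the given peer along the way.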
3916 fn serialize_project_transaction_for_peer(
3917 &mut self,
3918 project_transaction: ProjectTransaction,
3919 peer_id: PeerId,
3920 cx: &AppContext,
3921 ) -> proto::ProjectTransaction {
3922 let mut serialized_transaction = proto::ProjectTransaction {
3923 buffers: Default::default(),
3924 transactions: Default::default(),
3925 };
3926 for (buffer, transaction) in project_transaction.0 {
3927 serialized_transaction
3928 .buffers
3929 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3930 serialized_transaction
3931 .transactions
3932 .push(language::proto::serialize_transaction(&transaction));
3933 }
3934 serialized_transaction
3935 }
3936
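    /// Reconstructs a `ProjectTransaction` received over the wire: resolves each
    /// serialized buffer, waits for the corresponding edits to arrive, and
    /// optionally pushes the transactions onto the buffers' undo histories.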
3937 fn deserialize_project_transaction(
3938 &mut self,
3939 message: proto::ProjectTransaction,
3940 push_to_history: bool,
3941 cx: &mut ModelContext<Self>,
3942 ) -> Task<Result<ProjectTransaction>> {
3943 cx.spawn(|this, mut cx| async move {
3944 let mut project_transaction = ProjectTransaction::default();
3945 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3946 let buffer = this
3947 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3948 .await?;
3949 let transaction = language::proto::deserialize_transaction(transaction)?;
3950 project_transaction.0.insert(buffer, transaction);
3951 }
3952
3953 for (buffer, transaction) in &project_transaction.0 {
3954 buffer
3955 .update(&mut cx, |buffer, _| {
3956 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3957 })
3958 .await;
3959
3960 if push_to_history {
3961 buffer.update(&mut cx, |buffer, _| {
3962 buffer.push_transaction(transaction.clone(), Instant::now());
3963 });
3964 }
3965 }
3966
3967 Ok(project_transaction)
3968 })
3969 }
3970
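    /// Serializes a buffer for the given peer, sending the full buffer state the
    /// first time that peer sees it and only the buffer id on subsequent mentions.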
3971 fn serialize_buffer_for_peer(
3972 &mut self,
3973 buffer: &ModelHandle<Buffer>,
3974 peer_id: PeerId,
3975 cx: &AppContext,
3976 ) -> proto::Buffer {
3977 let buffer_id = buffer.read(cx).remote_id();
3978 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3979 if shared_buffers.insert(buffer_id) {
3980 proto::Buffer {
3981 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3982 }
3983 } else {
3984 proto::Buffer {
3985 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3986 }
3987 }
3988 }
3989
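    /// Resolves a buffer received in serialized form. An id variant waits until
    /// the referenced buffer has been opened locally; a state variant constructs
    /// a new buffer, attaches its file and worktree, and registers it with the
    /// project.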
3990 fn deserialize_buffer(
3991 &mut self,
3992 buffer: proto::Buffer,
3993 cx: &mut ModelContext<Self>,
3994 ) -> Task<Result<ModelHandle<Buffer>>> {
3995 let replica_id = self.replica_id();
3996
3997 let opened_buffer_tx = self.opened_buffer.0.clone();
3998 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3999 cx.spawn(|this, mut cx| async move {
4000 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4001 proto::buffer::Variant::Id(id) => {
4002 let buffer = loop {
4003 let buffer = this.read_with(&cx, |this, cx| {
4004 this.opened_buffers
4005 .get(&id)
4006 .and_then(|buffer| buffer.upgrade(cx))
4007 });
4008 if let Some(buffer) = buffer {
4009 break buffer;
4010 }
4011 opened_buffer_rx
4012 .next()
4013 .await
4014 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4015 };
4016 Ok(buffer)
4017 }
4018 proto::buffer::Variant::State(mut buffer) => {
4019 let mut buffer_worktree = None;
4020 let mut buffer_file = None;
4021 if let Some(file) = buffer.file.take() {
4022 this.read_with(&cx, |this, cx| {
4023 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4024 let worktree =
4025 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4026 anyhow!("no worktree found for id {}", file.worktree_id)
4027 })?;
4028 buffer_file =
4029 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4030 as Box<dyn language::File>);
4031 buffer_worktree = Some(worktree);
4032 Ok::<_, anyhow::Error>(())
4033 })?;
4034 }
4035
4036 let buffer = cx.add_model(|cx| {
4037 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4038 });
4039
4040 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4041
4042 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4043 Ok(buffer)
4044 }
4045 }
4046 })
4047 }
4048
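    /// Converts a protobuf symbol back into a `Symbol`, resolving its
    /// language-specific label, range, and signature.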
4049 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4050 let language = self
4051 .languages
4052 .get_language(&serialized_symbol.language_name);
4053 let start = serialized_symbol
4054 .start
4055 .ok_or_else(|| anyhow!("invalid start"))?;
4056 let end = serialized_symbol
4057 .end
4058 .ok_or_else(|| anyhow!("invalid end"))?;
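        // Assumes the wire value is a valid discriminant of the symbol kind; it is
        // produced by the matching transmute in `serialize_symbol`.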
4059 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4060 Ok(Symbol {
4061 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4062 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4063 language_name: serialized_symbol.language_name.clone(),
4064 label: language
4065 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4066 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4067 name: serialized_symbol.name,
4068 path: PathBuf::from(serialized_symbol.path),
4069 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4070 kind,
4071 signature: serialized_symbol
4072 .signature
4073 .try_into()
4074 .map_err(|_| anyhow!("invalid signature"))?,
4075 })
4076 }
4077
4078 async fn handle_buffer_saved(
4079 this: ModelHandle<Self>,
4080 envelope: TypedEnvelope<proto::BufferSaved>,
4081 _: Arc<Client>,
4082 mut cx: AsyncAppContext,
4083 ) -> Result<()> {
4084 let version = deserialize_version(envelope.payload.version);
4085 let mtime = envelope
4086 .payload
4087 .mtime
4088 .ok_or_else(|| anyhow!("missing mtime"))?
4089 .into();
4090
4091 this.update(&mut cx, |this, cx| {
4092 let buffer = this
4093 .opened_buffers
4094 .get(&envelope.payload.buffer_id)
4095 .and_then(|buffer| buffer.upgrade(cx));
4096 if let Some(buffer) = buffer {
4097 buffer.update(cx, |buffer, cx| {
4098 buffer.did_save(version, mtime, None, cx);
4099 });
4100 }
4101 Ok(())
4102 })
4103 }
4104
4105 async fn handle_buffer_reloaded(
4106 this: ModelHandle<Self>,
4107 envelope: TypedEnvelope<proto::BufferReloaded>,
4108 _: Arc<Client>,
4109 mut cx: AsyncAppContext,
4110 ) -> Result<()> {
4111 let payload = envelope.payload.clone();
4112 let version = deserialize_version(payload.version);
4113 let mtime = payload
4114 .mtime
4115 .ok_or_else(|| anyhow!("missing mtime"))?
4116 .into();
4117 this.update(&mut cx, |this, cx| {
4118 let buffer = this
4119 .opened_buffers
4120 .get(&payload.buffer_id)
4121 .and_then(|buffer| buffer.upgrade(cx));
4122 if let Some(buffer) = buffer {
4123 buffer.update(cx, |buffer, cx| {
4124 buffer.did_reload(version, mtime, cx);
4125 });
4126 }
4127 Ok(())
4128 })
4129 }
4130
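    /// Fuzzy-matches `query` against the paths of all visible worktrees, returning
    /// at most `max_results` matches. The matching runs on the background executor
    /// and can be cancelled via `cancel_flag`.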
4131 pub fn match_paths<'a>(
4132 &self,
4133 query: &'a str,
4134 include_ignored: bool,
4135 smart_case: bool,
4136 max_results: usize,
4137 cancel_flag: &'a AtomicBool,
4138 cx: &AppContext,
4139 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4140 let worktrees = self
4141 .worktrees(cx)
4142 .filter(|worktree| worktree.read(cx).is_visible())
4143 .collect::<Vec<_>>();
4144 let include_root_name = worktrees.len() > 1;
4145 let candidate_sets = worktrees
4146 .into_iter()
4147 .map(|worktree| CandidateSet {
4148 snapshot: worktree.read(cx).snapshot(),
4149 include_ignored,
4150 include_root_name,
4151 })
4152 .collect::<Vec<_>>();
4153
4154 let background = cx.background().clone();
4155 async move {
4156 fuzzy::match_paths(
4157 candidate_sets.as_slice(),
4158 query,
4159 smart_case,
4160 max_results,
4161 cancel_flag,
4162 background,
4163 )
4164 .await
4165 }
4166 }
4167
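    /// Converts LSP text edits into anchor-based buffer edits, resolved against the
    /// snapshot that was sent to the language server at `version` (or the current
    /// text if no version is given). Adjacent edits are merged, and multi-line
    /// replacements are diffed so that anchors in unchanged regions are preserved.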
4168 fn edits_from_lsp(
4169 &mut self,
4170 buffer: &ModelHandle<Buffer>,
4171 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4172 version: Option<i32>,
4173 cx: &mut ModelContext<Self>,
4174 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4175 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4176 cx.background().spawn(async move {
4177 let snapshot = snapshot?;
4178 let mut lsp_edits = lsp_edits
4179 .into_iter()
4180 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4181 .peekable();
4182
4183 let mut edits = Vec::new();
4184 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4185 // Combine any LSP edits that are adjacent.
4186 //
4187 // Also, combine LSP edits that are separated from each other by only
4188 // a newline. This is important because for some code actions,
4189 // Rust-analyzer rewrites the entire buffer via a series of edits that
4190 // are separated by unchanged newline characters.
4191 //
4192 // In order for the diffing logic below to work properly, any edits that
4193 // cancel each other out must be combined into one.
4194 while let Some((next_range, next_text)) = lsp_edits.peek() {
4195 if next_range.start > range.end {
4196 if next_range.start.row > range.end.row + 1
4197 || next_range.start.column > 0
4198 || snapshot.clip_point_utf16(
4199 PointUtf16::new(range.end.row, u32::MAX),
4200 Bias::Left,
4201 ) > range.end
4202 {
4203 break;
4204 }
4205 new_text.push('\n');
4206 }
4207 range.end = next_range.end;
4208 new_text.push_str(&next_text);
4209 lsp_edits.next();
4210 }
4211
4212 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4213 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4214 {
4215 return Err(anyhow!("invalid edits received from language server"));
4216 }
4217
4218 // For multiline edits, perform a diff of the old and new text so that
4219 // we can identify the changes more precisely, preserving the locations
4220 // of any anchors positioned in the unchanged regions.
4221 if range.end.row > range.start.row {
4222 let mut offset = range.start.to_offset(&snapshot);
4223 let old_text = snapshot.text_for_range(range).collect::<String>();
4224
4225 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4226 let mut moved_since_edit = true;
4227 for change in diff.iter_all_changes() {
4228 let tag = change.tag();
4229 let value = change.value();
4230 match tag {
4231 ChangeTag::Equal => {
4232 offset += value.len();
4233 moved_since_edit = true;
4234 }
4235 ChangeTag::Delete => {
4236 let start = snapshot.anchor_after(offset);
4237 let end = snapshot.anchor_before(offset + value.len());
4238 if moved_since_edit {
4239 edits.push((start..end, String::new()));
4240 } else {
4241 edits.last_mut().unwrap().0.end = end;
4242 }
4243 offset += value.len();
4244 moved_since_edit = false;
4245 }
4246 ChangeTag::Insert => {
4247 if moved_since_edit {
4248 let anchor = snapshot.anchor_after(offset);
4249 edits.push((anchor.clone()..anchor, value.to_string()));
4250 } else {
4251 edits.last_mut().unwrap().1.push_str(value);
4252 }
4253 moved_since_edit = false;
4254 }
4255 }
4256 }
4257 } else if range.end == range.start {
4258 let anchor = snapshot.anchor_after(range.start);
4259 edits.push((anchor.clone()..anchor, new_text));
4260 } else {
4261 let edit_start = snapshot.anchor_after(range.start);
4262 let edit_end = snapshot.anchor_before(range.end);
4263 edits.push((edit_start..edit_end, new_text));
4264 }
4265 }
4266
4267 Ok(edits)
4268 })
4269 }
4270
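    /// Returns the snapshot that was reported to the language server at `version`,
    /// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the
    /// requested one. Falls back to the current text when no version is given.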
4271 fn buffer_snapshot_for_lsp_version(
4272 &mut self,
4273 buffer: &ModelHandle<Buffer>,
4274 version: Option<i32>,
4275 cx: &AppContext,
4276 ) -> Result<TextBufferSnapshot> {
4277 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4278
4279 if let Some(version) = version {
4280 let buffer_id = buffer.read(cx).remote_id();
4281 let snapshots = self
4282 .buffer_snapshots
4283 .get_mut(&buffer_id)
4284 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4285 let mut found_snapshot = None;
4286 snapshots.retain(|(snapshot_version, snapshot)| {
4287 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4288 false
4289 } else {
4290 if *snapshot_version == version {
4291 found_snapshot = Some(snapshot.clone());
4292 }
4293 true
4294 }
4295 });
4296
4297 found_snapshot.ok_or_else(|| {
4298 anyhow!(
4299 "snapshot not found for buffer {} at version {}",
4300 buffer_id,
4301 version
4302 )
4303 })
4304 } else {
            Ok(buffer.read(cx).text_snapshot())
4306 }
4307 }
4308
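    /// Returns the language server associated with the buffer's worktree and
    /// language, if both are known and a server has been started for that pair.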
4309 fn language_server_for_buffer(
4310 &self,
4311 buffer: &Buffer,
4312 cx: &AppContext,
4313 ) -> Option<&Arc<LanguageServer>> {
4314 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4315 let worktree_id = file.worktree_id(cx);
4316 self.language_servers.get(&(worktree_id, language.name()))
4317 } else {
4318 None
4319 }
4320 }
4321}
4322
4323impl WorktreeHandle {
4324 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4325 match self {
4326 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4327 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4328 }
4329 }
4330}
4331
4332impl OpenBuffer {
4333 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4334 match self {
4335 OpenBuffer::Strong(handle) => Some(handle.clone()),
4336 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4337 OpenBuffer::Loading(_) => None,
4338 }
4339 }
4340}
4341
4342struct CandidateSet {
4343 snapshot: Snapshot,
4344 include_ignored: bool,
4345 include_root_name: bool,
4346}
4347
4348impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4349 type Candidates = CandidateSetIter<'a>;
4350
4351 fn id(&self) -> usize {
4352 self.snapshot.id().to_usize()
4353 }
4354
4355 fn len(&self) -> usize {
4356 if self.include_ignored {
4357 self.snapshot.file_count()
4358 } else {
4359 self.snapshot.visible_file_count()
4360 }
4361 }
4362
4363 fn prefix(&self) -> Arc<str> {
4364 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4365 self.snapshot.root_name().into()
4366 } else if self.include_root_name {
4367 format!("{}/", self.snapshot.root_name()).into()
4368 } else {
4369 "".into()
4370 }
4371 }
4372
4373 fn candidates(&'a self, start: usize) -> Self::Candidates {
4374 CandidateSetIter {
4375 traversal: self.snapshot.files(self.include_ignored, start),
4376 }
4377 }
4378}
4379
4380struct CandidateSetIter<'a> {
4381 traversal: Traversal<'a>,
4382}
4383
4384impl<'a> Iterator for CandidateSetIter<'a> {
4385 type Item = PathMatchCandidate<'a>;
4386
4387 fn next(&mut self) -> Option<Self::Item> {
4388 self.traversal.next().map(|entry| {
4389 if let EntryKind::File(char_bag) = entry.kind {
4390 PathMatchCandidate {
4391 path: &entry.path,
4392 char_bag,
4393 }
4394 } else {
4395 unreachable!()
4396 }
4397 })
4398 }
4399}
4400
4401impl Entity for Project {
4402 type Event = Event;
4403
4404 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4405 match &self.client_state {
4406 ProjectClientState::Local { remote_id_rx, .. } => {
4407 if let Some(project_id) = *remote_id_rx.borrow() {
4408 self.client
4409 .send(proto::UnregisterProject { project_id })
4410 .log_err();
4411 }
4412 }
4413 ProjectClientState::Remote { remote_id, .. } => {
4414 self.client
4415 .send(proto::LeaveProject {
4416 project_id: *remote_id,
4417 })
4418 .log_err();
4419 }
4420 }
4421 }
4422
4423 fn app_will_quit(
4424 &mut self,
4425 _: &mut MutableAppContext,
4426 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4427 let shutdown_futures = self
4428 .language_servers
4429 .drain()
4430 .filter_map(|(_, server)| server.shutdown())
4431 .collect::<Vec<_>>();
4432 Some(
4433 async move {
4434 futures::future::join_all(shutdown_futures).await;
4435 }
4436 .boxed(),
4437 )
4438 }
4439}
4440
4441impl Collaborator {
4442 fn from_proto(
4443 message: proto::Collaborator,
4444 user_store: &ModelHandle<UserStore>,
4445 cx: &mut AsyncAppContext,
4446 ) -> impl Future<Output = Result<Self>> {
4447 let user = user_store.update(cx, |user_store, cx| {
4448 user_store.fetch_user(message.user_id, cx)
4449 });
4450
4451 async move {
4452 Ok(Self {
4453 peer_id: PeerId(message.peer_id),
4454 user: user.await?,
4455 replica_id: message.replica_id as ReplicaId,
4456 })
4457 }
4458 }
4459}
4460
4461impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4462 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4463 Self {
4464 worktree_id,
4465 path: path.as_ref().into(),
4466 }
4467 }
4468}
4469
4470impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4471 fn from(options: lsp::CreateFileOptions) -> Self {
4472 Self {
4473 overwrite: options.overwrite.unwrap_or(false),
4474 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4475 }
4476 }
4477}
4478
4479impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4480 fn from(options: lsp::RenameFileOptions) -> Self {
4481 Self {
4482 overwrite: options.overwrite.unwrap_or(false),
4483 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4484 }
4485 }
4486}
4487
4488impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4489 fn from(options: lsp::DeleteFileOptions) -> Self {
4490 Self {
4491 recursive: options.recursive.unwrap_or(false),
4492 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4493 }
4494 }
4495}
4496
4497fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4498 proto::Symbol {
4499 source_worktree_id: symbol.source_worktree_id.to_proto(),
4500 worktree_id: symbol.worktree_id.to_proto(),
4501 language_name: symbol.language_name.clone(),
4502 name: symbol.name.clone(),
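        // The symbol kind is transported as its raw integer representation; see
        // the matching transmute in `deserialize_symbol`.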
4503 kind: unsafe { mem::transmute(symbol.kind) },
4504 path: symbol.path.to_string_lossy().to_string(),
4505 start: Some(proto::Point {
4506 row: symbol.range.start.row,
4507 column: symbol.range.start.column,
4508 }),
4509 end: Some(proto::Point {
4510 row: symbol.range.end.row,
4511 column: symbol.range.end.column,
4512 }),
4513 signature: symbol.signature.to_vec(),
4514 }
4515}
4516
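/// Computes `path` relative to `base`, emitting `..` components where the two
/// paths diverge. A minimal illustration with made-up paths:
///
/// ```ignore
/// assert_eq!(
///     relativize_path(Path::new("/a/b"), Path::new("/a/c/d.rs")),
///     PathBuf::from("../c/d.rs")
/// );
/// ```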
4517fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4518 let mut path_components = path.components();
4519 let mut base_components = base.components();
4520 let mut components: Vec<Component> = Vec::new();
4521 loop {
4522 match (path_components.next(), base_components.next()) {
4523 (None, None) => break,
4524 (Some(a), None) => {
4525 components.push(a);
4526 components.extend(path_components.by_ref());
4527 break;
4528 }
4529 (None, _) => components.push(Component::ParentDir),
4530 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4531 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4532 (Some(a), Some(_)) => {
4533 components.push(Component::ParentDir);
4534 for _ in base_components {
4535 components.push(Component::ParentDir);
4536 }
4537 components.push(a);
4538 components.extend(path_components.by_ref());
4539 break;
4540 }
4541 }
4542 }
4543 components.iter().map(|c| c.as_os_str()).collect()
4544}
4545
4546impl Item for Buffer {
4547 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4548 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4549 }
4550}
4551
4552#[cfg(test)]
4553mod tests {
4554 use super::{Event, *};
4555 use fs::RealFs;
4556 use futures::StreamExt;
4557 use gpui::test::subscribe;
4558 use language::{
4559 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4560 ToPoint,
4561 };
4562 use lsp::Url;
4563 use serde_json::json;
4564 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4565 use unindent::Unindent as _;
4566 use util::test::temp_tree;
4567 use worktree::WorktreeHandle as _;
4568
4569 #[gpui::test]
4570 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4571 let dir = temp_tree(json!({
4572 "root": {
4573 "apple": "",
4574 "banana": {
4575 "carrot": {
4576 "date": "",
4577 "endive": "",
4578 }
4579 },
4580 "fennel": {
4581 "grape": "",
4582 }
4583 }
4584 }));
4585
4586 let root_link_path = dir.path().join("root_link");
4587 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4588 unix::fs::symlink(
4589 &dir.path().join("root/fennel"),
4590 &dir.path().join("root/finnochio"),
4591 )
4592 .unwrap();
4593
4594 let project = Project::test(Arc::new(RealFs), cx);
4595
4596 let (tree, _) = project
4597 .update(cx, |project, cx| {
4598 project.find_or_create_local_worktree(&root_link_path, true, cx)
4599 })
4600 .await
4601 .unwrap();
4602
4603 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4604 .await;
4605 cx.read(|cx| {
4606 let tree = tree.read(cx);
4607 assert_eq!(tree.file_count(), 5);
4608 assert_eq!(
4609 tree.inode_for_path("fennel/grape"),
4610 tree.inode_for_path("finnochio/grape")
4611 );
4612 });
4613
4614 let cancel_flag = Default::default();
4615 let results = project
4616 .read_with(cx, |project, cx| {
4617 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4618 })
4619 .await;
4620 assert_eq!(
4621 results
4622 .into_iter()
4623 .map(|result| result.path)
4624 .collect::<Vec<Arc<Path>>>(),
4625 vec![
4626 PathBuf::from("banana/carrot/date").into(),
4627 PathBuf::from("banana/carrot/endive").into(),
4628 ]
4629 );
4630 }
4631
4632 #[gpui::test]
4633 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4634 cx.foreground().forbid_parking();
4635
4636 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4637 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4638 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4639 completion_provider: Some(lsp::CompletionOptions {
4640 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4641 ..Default::default()
4642 }),
4643 ..Default::default()
4644 });
4645 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4646 completion_provider: Some(lsp::CompletionOptions {
4647 trigger_characters: Some(vec![":".to_string()]),
4648 ..Default::default()
4649 }),
4650 ..Default::default()
4651 });
4652
4653 let rust_language = Arc::new(Language::new(
4654 LanguageConfig {
4655 name: "Rust".into(),
4656 path_suffixes: vec!["rs".to_string()],
4657 language_server: Some(rust_lsp_config),
4658 ..Default::default()
4659 },
4660 Some(tree_sitter_rust::language()),
4661 ));
4662 let json_language = Arc::new(Language::new(
4663 LanguageConfig {
4664 name: "JSON".into(),
4665 path_suffixes: vec!["json".to_string()],
4666 language_server: Some(json_lsp_config),
4667 ..Default::default()
4668 },
4669 None,
4670 ));
4671
4672 let fs = FakeFs::new(cx.background());
4673 fs.insert_tree(
4674 "/the-root",
4675 json!({
4676 "test.rs": "const A: i32 = 1;",
4677 "test2.rs": "",
4678 "Cargo.toml": "a = 1",
4679 "package.json": "{\"a\": 1}",
4680 }),
4681 )
4682 .await;
4683
4684 let project = Project::test(fs, cx);
4685 project.update(cx, |project, _| {
4686 project.languages.add(rust_language);
4687 project.languages.add(json_language);
4688 });
4689
4690 let worktree_id = project
4691 .update(cx, |project, cx| {
4692 project.find_or_create_local_worktree("/the-root", true, cx)
4693 })
4694 .await
4695 .unwrap()
4696 .0
4697 .read_with(cx, |tree, _| tree.id());
4698
4699 // Open a buffer without an associated language server.
4700 let toml_buffer = project
4701 .update(cx, |project, cx| {
4702 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4703 })
4704 .await
4705 .unwrap();
4706
4707 // Open a buffer with an associated language server.
4708 let rust_buffer = project
4709 .update(cx, |project, cx| {
4710 project.open_buffer((worktree_id, "test.rs"), cx)
4711 })
4712 .await
4713 .unwrap();
4714
4715 // A server is started up, and it is notified about Rust files.
4716 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4717 assert_eq!(
4718 fake_rust_server
4719 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4720 .await
4721 .text_document,
4722 lsp::TextDocumentItem {
4723 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4724 version: 0,
4725 text: "const A: i32 = 1;".to_string(),
4726 language_id: Default::default()
4727 }
4728 );
4729
4730 // The buffer is configured based on the language server's capabilities.
4731 rust_buffer.read_with(cx, |buffer, _| {
4732 assert_eq!(
4733 buffer.completion_triggers(),
4734 &[".".to_string(), "::".to_string()]
4735 );
4736 });
4737 toml_buffer.read_with(cx, |buffer, _| {
4738 assert!(buffer.completion_triggers().is_empty());
4739 });
4740
4741 // Edit a buffer. The changes are reported to the language server.
4742 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4743 assert_eq!(
4744 fake_rust_server
4745 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4746 .await
4747 .text_document,
4748 lsp::VersionedTextDocumentIdentifier::new(
4749 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4750 1
4751 )
4752 );
4753
4754 // Open a third buffer with a different associated language server.
4755 let json_buffer = project
4756 .update(cx, |project, cx| {
4757 project.open_buffer((worktree_id, "package.json"), cx)
4758 })
4759 .await
4760 .unwrap();
4761
        // Another language server is started up, and it is notified about the
        // newly opened buffer that matches its language.
4764 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4765 assert_eq!(
4766 fake_json_server
4767 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4768 .await
4769 .text_document,
4770 lsp::TextDocumentItem {
4771 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4772 version: 0,
4773 text: "{\"a\": 1}".to_string(),
4774 language_id: Default::default()
4775 }
4776 );
4777
4778 // This buffer is configured based on the second language server's
4779 // capabilities.
4780 json_buffer.read_with(cx, |buffer, _| {
4781 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4782 });
4783
4784 // When opening another buffer whose language server is already running,
4785 // it is also configured based on the existing language server's capabilities.
4786 let rust_buffer2 = project
4787 .update(cx, |project, cx| {
4788 project.open_buffer((worktree_id, "test2.rs"), cx)
4789 })
4790 .await
4791 .unwrap();
4792 rust_buffer2.read_with(cx, |buffer, _| {
4793 assert_eq!(
4794 buffer.completion_triggers(),
4795 &[".".to_string(), "::".to_string()]
4796 );
4797 });
4798
4799 // Changes are reported only to servers matching the buffer's language.
4800 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4801 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4802 assert_eq!(
4803 fake_rust_server
4804 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4805 .await
4806 .text_document,
4807 lsp::VersionedTextDocumentIdentifier::new(
4808 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4809 1
4810 )
4811 );
4812
4813 // Save notifications are reported to all servers.
4814 toml_buffer
4815 .update(cx, |buffer, cx| buffer.save(cx))
4816 .await
4817 .unwrap();
4818 assert_eq!(
4819 fake_rust_server
4820 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4821 .await
4822 .text_document,
4823 lsp::TextDocumentIdentifier::new(
4824 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4825 )
4826 );
4827 assert_eq!(
4828 fake_json_server
4829 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4830 .await
4831 .text_document,
4832 lsp::TextDocumentIdentifier::new(
4833 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4834 )
4835 );
4836
4837 // Close notifications are reported only to servers matching the buffer's language.
4838 cx.update(|_| drop(json_buffer));
4839 let close_message = lsp::DidCloseTextDocumentParams {
4840 text_document: lsp::TextDocumentIdentifier::new(
4841 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4842 ),
4843 };
4844 assert_eq!(
4845 fake_json_server
4846 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4847 .await,
4848 close_message,
4849 );
4850 }
4851
4852 #[gpui::test]
4853 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4854 cx.foreground().forbid_parking();
4855
4856 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4857 let progress_token = language_server_config
4858 .disk_based_diagnostics_progress_token
4859 .clone()
4860 .unwrap();
4861
4862 let language = Arc::new(Language::new(
4863 LanguageConfig {
4864 name: "Rust".into(),
4865 path_suffixes: vec!["rs".to_string()],
4866 language_server: Some(language_server_config),
4867 ..Default::default()
4868 },
4869 Some(tree_sitter_rust::language()),
4870 ));
4871
4872 let fs = FakeFs::new(cx.background());
4873 fs.insert_tree(
4874 "/dir",
4875 json!({
4876 "a.rs": "fn a() { A }",
4877 "b.rs": "const y: i32 = 1",
4878 }),
4879 )
4880 .await;
4881
4882 let project = Project::test(fs, cx);
4883 project.update(cx, |project, _| project.languages.add(language));
4884
4885 let (tree, _) = project
4886 .update(cx, |project, cx| {
4887 project.find_or_create_local_worktree("/dir", true, cx)
4888 })
4889 .await
4890 .unwrap();
4891 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4892
4893 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4894 .await;
4895
4896 // Cause worktree to start the fake language server
4897 let _buffer = project
4898 .update(cx, |project, cx| {
4899 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4900 })
4901 .await
4902 .unwrap();
4903
4904 let mut events = subscribe(&project, cx);
4905
4906 let mut fake_server = fake_servers.next().await.unwrap();
4907 fake_server.start_progress(&progress_token).await;
4908 assert_eq!(
4909 events.next().await.unwrap(),
4910 Event::DiskBasedDiagnosticsStarted
4911 );
4912
4913 fake_server.start_progress(&progress_token).await;
4914 fake_server.end_progress(&progress_token).await;
4915 fake_server.start_progress(&progress_token).await;
4916
4917 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4918 lsp::PublishDiagnosticsParams {
4919 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4920 version: None,
4921 diagnostics: vec![lsp::Diagnostic {
4922 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4923 severity: Some(lsp::DiagnosticSeverity::ERROR),
4924 message: "undefined variable 'A'".to_string(),
4925 ..Default::default()
4926 }],
4927 },
4928 );
4929 assert_eq!(
4930 events.next().await.unwrap(),
4931 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4932 );
4933
4934 fake_server.end_progress(&progress_token).await;
4935 fake_server.end_progress(&progress_token).await;
4936 assert_eq!(
4937 events.next().await.unwrap(),
4938 Event::DiskBasedDiagnosticsUpdated
4939 );
4940 assert_eq!(
4941 events.next().await.unwrap(),
4942 Event::DiskBasedDiagnosticsFinished
4943 );
4944
4945 let buffer = project
4946 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4947 .await
4948 .unwrap();
4949
4950 buffer.read_with(cx, |buffer, _| {
4951 let snapshot = buffer.snapshot();
4952 let diagnostics = snapshot
4953 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4954 .collect::<Vec<_>>();
4955 assert_eq!(
4956 diagnostics,
4957 &[DiagnosticEntry {
4958 range: Point::new(0, 9)..Point::new(0, 10),
4959 diagnostic: Diagnostic {
4960 severity: lsp::DiagnosticSeverity::ERROR,
4961 message: "undefined variable 'A'".to_string(),
4962 group_id: 0,
4963 is_primary: true,
4964 ..Default::default()
4965 }
4966 }]
4967 )
4968 });
4969 }
4970
4971 #[gpui::test]
4972 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4973 cx.foreground().forbid_parking();
4974
4975 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4976 lsp_config
4977 .disk_based_diagnostic_sources
4978 .insert("disk".to_string());
4979 let language = Arc::new(Language::new(
4980 LanguageConfig {
4981 name: "Rust".into(),
4982 path_suffixes: vec!["rs".to_string()],
4983 language_server: Some(lsp_config),
4984 ..Default::default()
4985 },
4986 Some(tree_sitter_rust::language()),
4987 ));
4988
4989 let text = "
4990 fn a() { A }
4991 fn b() { BB }
4992 fn c() { CCC }
4993 "
4994 .unindent();
4995
4996 let fs = FakeFs::new(cx.background());
4997 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4998
4999 let project = Project::test(fs, cx);
5000 project.update(cx, |project, _| project.languages.add(language));
5001
5002 let worktree_id = project
5003 .update(cx, |project, cx| {
5004 project.find_or_create_local_worktree("/dir", true, cx)
5005 })
5006 .await
5007 .unwrap()
5008 .0
5009 .read_with(cx, |tree, _| tree.id());
5010
5011 let buffer = project
5012 .update(cx, |project, cx| {
5013 project.open_buffer((worktree_id, "a.rs"), cx)
5014 })
5015 .await
5016 .unwrap();
5017
5018 let mut fake_server = fake_servers.next().await.unwrap();
5019 let open_notification = fake_server
5020 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5021 .await;
5022
5023 // Edit the buffer, moving the content down
5024 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5025 let change_notification_1 = fake_server
5026 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5027 .await;
5028 assert!(
5029 change_notification_1.text_document.version > open_notification.text_document.version
5030 );
5031
5032 // Report some diagnostics for the initial version of the buffer
5033 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5034 lsp::PublishDiagnosticsParams {
5035 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5036 version: Some(open_notification.text_document.version),
5037 diagnostics: vec![
5038 lsp::Diagnostic {
5039 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5040 severity: Some(DiagnosticSeverity::ERROR),
5041 message: "undefined variable 'A'".to_string(),
5042 source: Some("disk".to_string()),
5043 ..Default::default()
5044 },
5045 lsp::Diagnostic {
5046 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5047 severity: Some(DiagnosticSeverity::ERROR),
5048 message: "undefined variable 'BB'".to_string(),
5049 source: Some("disk".to_string()),
5050 ..Default::default()
5051 },
5052 lsp::Diagnostic {
5053 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5054 severity: Some(DiagnosticSeverity::ERROR),
5055 source: Some("disk".to_string()),
5056 message: "undefined variable 'CCC'".to_string(),
5057 ..Default::default()
5058 },
5059 ],
5060 },
5061 );
5062
5063 // The diagnostics have moved down since they were created.
5064 buffer.next_notification(cx).await;
5065 buffer.read_with(cx, |buffer, _| {
5066 assert_eq!(
5067 buffer
5068 .snapshot()
5069 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5070 .collect::<Vec<_>>(),
5071 &[
5072 DiagnosticEntry {
5073 range: Point::new(3, 9)..Point::new(3, 11),
5074 diagnostic: Diagnostic {
5075 severity: DiagnosticSeverity::ERROR,
5076 message: "undefined variable 'BB'".to_string(),
5077 is_disk_based: true,
5078 group_id: 1,
5079 is_primary: true,
5080 ..Default::default()
5081 },
5082 },
5083 DiagnosticEntry {
5084 range: Point::new(4, 9)..Point::new(4, 12),
5085 diagnostic: Diagnostic {
5086 severity: DiagnosticSeverity::ERROR,
5087 message: "undefined variable 'CCC'".to_string(),
5088 is_disk_based: true,
5089 group_id: 2,
5090 is_primary: true,
5091 ..Default::default()
5092 }
5093 }
5094 ]
5095 );
5096 assert_eq!(
5097 chunks_with_diagnostics(buffer, 0..buffer.len()),
5098 [
5099 ("\n\nfn a() { ".to_string(), None),
5100 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5101 (" }\nfn b() { ".to_string(), None),
5102 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5103 (" }\nfn c() { ".to_string(), None),
5104 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5105 (" }\n".to_string(), None),
5106 ]
5107 );
5108 assert_eq!(
5109 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5110 [
5111 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5112 (" }\nfn c() { ".to_string(), None),
5113 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5114 ]
5115 );
5116 });
5117
5118 // Ensure overlapping diagnostics are highlighted correctly.
5119 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5120 lsp::PublishDiagnosticsParams {
5121 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5122 version: Some(open_notification.text_document.version),
5123 diagnostics: vec![
5124 lsp::Diagnostic {
5125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5126 severity: Some(DiagnosticSeverity::ERROR),
5127 message: "undefined variable 'A'".to_string(),
5128 source: Some("disk".to_string()),
5129 ..Default::default()
5130 },
5131 lsp::Diagnostic {
5132 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5133 severity: Some(DiagnosticSeverity::WARNING),
5134 message: "unreachable statement".to_string(),
5135 source: Some("disk".to_string()),
5136 ..Default::default()
5137 },
5138 ],
5139 },
5140 );
5141
5142 buffer.next_notification(cx).await;
5143 buffer.read_with(cx, |buffer, _| {
5144 assert_eq!(
5145 buffer
5146 .snapshot()
5147 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5148 .collect::<Vec<_>>(),
5149 &[
5150 DiagnosticEntry {
5151 range: Point::new(2, 9)..Point::new(2, 12),
5152 diagnostic: Diagnostic {
5153 severity: DiagnosticSeverity::WARNING,
5154 message: "unreachable statement".to_string(),
5155 is_disk_based: true,
5156 group_id: 1,
5157 is_primary: true,
5158 ..Default::default()
5159 }
5160 },
5161 DiagnosticEntry {
5162 range: Point::new(2, 9)..Point::new(2, 10),
5163 diagnostic: Diagnostic {
5164 severity: DiagnosticSeverity::ERROR,
5165 message: "undefined variable 'A'".to_string(),
5166 is_disk_based: true,
5167 group_id: 0,
5168 is_primary: true,
5169 ..Default::default()
5170 },
5171 }
5172 ]
5173 );
5174 assert_eq!(
5175 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5176 [
5177 ("fn a() { ".to_string(), None),
5178 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5179 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5180 ("\n".to_string(), None),
5181 ]
5182 );
5183 assert_eq!(
5184 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5185 [
5186 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5187 ("\n".to_string(), None),
5188 ]
5189 );
5190 });
5191
        // Keep editing the buffer and ensure disk-based diagnostics get translated according to
        // the edits made since the buffer version at which they were reported.
5194 buffer.update(cx, |buffer, cx| {
5195 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5196 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5197 });
5198 let change_notification_2 =
5199 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5200 assert!(
5201 change_notification_2.await.text_document.version
5202 > change_notification_1.text_document.version
5203 );
5204
5205 // Handle out-of-order diagnostics
5206 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5207 lsp::PublishDiagnosticsParams {
5208 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5209 version: Some(open_notification.text_document.version),
5210 diagnostics: vec![
5211 lsp::Diagnostic {
5212 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5213 severity: Some(DiagnosticSeverity::ERROR),
5214 message: "undefined variable 'BB'".to_string(),
5215 source: Some("disk".to_string()),
5216 ..Default::default()
5217 },
5218 lsp::Diagnostic {
5219 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5220 severity: Some(DiagnosticSeverity::WARNING),
5221 message: "undefined variable 'A'".to_string(),
5222 source: Some("disk".to_string()),
5223 ..Default::default()
5224 },
5225 ],
5226 },
5227 );
5228
5229 buffer.next_notification(cx).await;
5230 buffer.read_with(cx, |buffer, _| {
5231 assert_eq!(
5232 buffer
5233 .snapshot()
5234 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5235 .collect::<Vec<_>>(),
5236 &[
5237 DiagnosticEntry {
5238 range: Point::new(2, 21)..Point::new(2, 22),
5239 diagnostic: Diagnostic {
5240 severity: DiagnosticSeverity::WARNING,
5241 message: "undefined variable 'A'".to_string(),
5242 is_disk_based: true,
5243 group_id: 1,
5244 is_primary: true,
5245 ..Default::default()
5246 }
5247 },
5248 DiagnosticEntry {
5249 range: Point::new(3, 9)..Point::new(3, 11),
5250 diagnostic: Diagnostic {
5251 severity: DiagnosticSeverity::ERROR,
5252 message: "undefined variable 'BB'".to_string(),
5253 is_disk_based: true,
5254 group_id: 0,
5255 is_primary: true,
5256 ..Default::default()
5257 },
5258 }
5259 ]
5260 );
5261 });
5262 }
5263
5264 #[gpui::test]
5265 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5266 cx.foreground().forbid_parking();
5267
5268 let text = concat!(
5269 "let one = ;\n", //
5270 "let two = \n",
5271 "let three = 3;\n",
5272 );
5273
5274 let fs = FakeFs::new(cx.background());
5275 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5276
5277 let project = Project::test(fs, cx);
5278 let worktree_id = project
5279 .update(cx, |project, cx| {
5280 project.find_or_create_local_worktree("/dir", true, cx)
5281 })
5282 .await
5283 .unwrap()
5284 .0
5285 .read_with(cx, |tree, _| tree.id());
5286
5287 let buffer = project
5288 .update(cx, |project, cx| {
5289 project.open_buffer((worktree_id, "a.rs"), cx)
5290 })
5291 .await
5292 .unwrap();
5293
5294 project.update(cx, |project, cx| {
5295 project
5296 .update_buffer_diagnostics(
5297 &buffer,
5298 vec![
5299 DiagnosticEntry {
5300 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5301 diagnostic: Diagnostic {
5302 severity: DiagnosticSeverity::ERROR,
5303 message: "syntax error 1".to_string(),
5304 ..Default::default()
5305 },
5306 },
5307 DiagnosticEntry {
5308 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5309 diagnostic: Diagnostic {
5310 severity: DiagnosticSeverity::ERROR,
5311 message: "syntax error 2".to_string(),
5312 ..Default::default()
5313 },
5314 },
5315 ],
5316 None,
5317 cx,
5318 )
5319 .unwrap();
5320 });
5321
5322 // An empty range is extended forward to include the following character.
5323 // At the end of a line, an empty range is extended backward to include
5324 // the preceding character.
5325 buffer.read_with(cx, |buffer, _| {
5326 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5327 assert_eq!(
5328 chunks
5329 .iter()
5330 .map(|(s, d)| (s.as_str(), *d))
5331 .collect::<Vec<_>>(),
5332 &[
5333 ("let one = ", None),
5334 (";", Some(DiagnosticSeverity::ERROR)),
5335 ("\nlet two =", None),
5336 (" ", Some(DiagnosticSeverity::ERROR)),
5337 ("\nlet three = 3;\n", None)
5338 ]
5339 );
5340 });
5341 }
5342
5343 #[gpui::test]
5344 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5345 cx.foreground().forbid_parking();
5346
5347 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5348 let language = Arc::new(Language::new(
5349 LanguageConfig {
5350 name: "Rust".into(),
5351 path_suffixes: vec!["rs".to_string()],
5352 language_server: Some(lsp_config),
5353 ..Default::default()
5354 },
5355 Some(tree_sitter_rust::language()),
5356 ));
5357
5358 let text = "
5359 fn a() {
5360 f1();
5361 }
5362 fn b() {
5363 f2();
5364 }
5365 fn c() {
5366 f3();
5367 }
5368 "
5369 .unindent();
5370
5371 let fs = FakeFs::new(cx.background());
5372 fs.insert_tree(
5373 "/dir",
5374 json!({
5375 "a.rs": text.clone(),
5376 }),
5377 )
5378 .await;
5379
5380 let project = Project::test(fs, cx);
5381 project.update(cx, |project, _| project.languages.add(language));
5382
5383 let worktree_id = project
5384 .update(cx, |project, cx| {
5385 project.find_or_create_local_worktree("/dir", true, cx)
5386 })
5387 .await
5388 .unwrap()
5389 .0
5390 .read_with(cx, |tree, _| tree.id());
5391
5392 let buffer = project
5393 .update(cx, |project, cx| {
5394 project.open_buffer((worktree_id, "a.rs"), cx)
5395 })
5396 .await
5397 .unwrap();
5398
5399 let mut fake_server = fake_servers.next().await.unwrap();
5400 let lsp_document_version = fake_server
5401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5402 .await
5403 .text_document
5404 .version;
5405
5406 // Simulate editing the buffer after the language server computes some edits.
5407 buffer.update(cx, |buffer, cx| {
5408 buffer.edit(
5409 [Point::new(0, 0)..Point::new(0, 0)],
5410 "// above first function\n",
5411 cx,
5412 );
5413 buffer.edit(
5414 [Point::new(2, 0)..Point::new(2, 0)],
5415 " // inside first function\n",
5416 cx,
5417 );
5418 buffer.edit(
5419 [Point::new(6, 4)..Point::new(6, 4)],
5420 "// inside second function ",
5421 cx,
5422 );
5423
5424 assert_eq!(
5425 buffer.text(),
5426 "
5427 // above first function
5428 fn a() {
5429 // inside first function
5430 f1();
5431 }
5432 fn b() {
5433 // inside second function f2();
5434 }
5435 fn c() {
5436 f3();
5437 }
5438 "
5439 .unindent()
5440 );
5441 });
5442
5443 let edits = project
5444 .update(cx, |project, cx| {
5445 project.edits_from_lsp(
5446 &buffer,
5447 vec![
5448 // replace body of first function
5449 lsp::TextEdit {
5450 range: lsp::Range::new(
5451 lsp::Position::new(0, 0),
5452 lsp::Position::new(3, 0),
5453 ),
5454 new_text: "
5455 fn a() {
5456 f10();
5457 }
5458 "
5459 .unindent(),
5460 },
5461 // edit inside second function
5462 lsp::TextEdit {
5463 range: lsp::Range::new(
5464 lsp::Position::new(4, 6),
5465 lsp::Position::new(4, 6),
5466 ),
5467 new_text: "00".into(),
5468 },
5469 // edit inside third function via two distinct edits
5470 lsp::TextEdit {
5471 range: lsp::Range::new(
5472 lsp::Position::new(7, 5),
5473 lsp::Position::new(7, 5),
5474 ),
5475 new_text: "4000".into(),
5476 },
5477 lsp::TextEdit {
5478 range: lsp::Range::new(
5479 lsp::Position::new(7, 5),
5480 lsp::Position::new(7, 6),
5481 ),
5482 new_text: "".into(),
5483 },
5484 ],
5485 Some(lsp_document_version),
5486 cx,
5487 )
5488 })
5489 .await
5490 .unwrap();
5491
5492 buffer.update(cx, |buffer, cx| {
5493 for (range, new_text) in edits {
5494 buffer.edit([range], new_text, cx);
5495 }
5496 assert_eq!(
5497 buffer.text(),
5498 "
5499 // above first function
5500 fn a() {
5501 // inside first function
5502 f10();
5503 }
5504 fn b() {
5505 // inside second function f200();
5506 }
5507 fn c() {
5508 f4000();
5509 }
5510 "
5511 .unindent()
5512 );
5513 });
5514 }
5515
5516 #[gpui::test]
5517 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5518 cx.foreground().forbid_parking();
5519
5520 let text = "
5521 use a::b;
5522 use a::c;
5523
5524 fn f() {
5525 b();
5526 c();
5527 }
5528 "
5529 .unindent();
5530
5531 let fs = FakeFs::new(cx.background());
5532 fs.insert_tree(
5533 "/dir",
5534 json!({
5535 "a.rs": text.clone(),
5536 }),
5537 )
5538 .await;
5539
5540 let project = Project::test(fs, cx);
5541 let worktree_id = project
5542 .update(cx, |project, cx| {
5543 project.find_or_create_local_worktree("/dir", true, cx)
5544 })
5545 .await
5546 .unwrap()
5547 .0
5548 .read_with(cx, |tree, _| tree.id());
5549
5550 let buffer = project
5551 .update(cx, |project, cx| {
5552 project.open_buffer((worktree_id, "a.rs"), cx)
5553 })
5554 .await
5555 .unwrap();
5556
5557 // Simulate the language server sending us a small edit in the form of a very large diff.
5558 // Rust-analyzer does this when performing a merge-imports code action.
5559 let edits = project
5560 .update(cx, |project, cx| {
5561 project.edits_from_lsp(
5562 &buffer,
5563 [
5564 // Replace the first use statement without editing the semicolon.
5565 lsp::TextEdit {
5566 range: lsp::Range::new(
5567 lsp::Position::new(0, 4),
5568 lsp::Position::new(0, 8),
5569 ),
5570 new_text: "a::{b, c}".into(),
5571 },
5572 // Reinsert the remainder of the file between the semicolon and the final
5573 // newline of the file.
5574 lsp::TextEdit {
5575 range: lsp::Range::new(
5576 lsp::Position::new(0, 9),
5577 lsp::Position::new(0, 9),
5578 ),
5579 new_text: "\n\n".into(),
5580 },
5581 lsp::TextEdit {
5582 range: lsp::Range::new(
5583 lsp::Position::new(0, 9),
5584 lsp::Position::new(0, 9),
5585 ),
5586 new_text: "
5587 fn f() {
5588 b();
5589 c();
5590 }"
5591 .unindent(),
5592 },
5593 // Delete everything after the first newline of the file.
5594 lsp::TextEdit {
5595 range: lsp::Range::new(
5596 lsp::Position::new(1, 0),
5597 lsp::Position::new(7, 0),
5598 ),
5599 new_text: "".into(),
5600 },
5601 ],
5602 None,
5603 cx,
5604 )
5605 })
5606 .await
5607 .unwrap();
5608
5609 buffer.update(cx, |buffer, cx| {
5610 let edits = edits
5611 .into_iter()
5612 .map(|(range, text)| {
5613 (
5614 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5615 text,
5616 )
5617 })
5618 .collect::<Vec<_>>();
5619
5620 assert_eq!(
5621 edits,
5622 [
5623 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5624 (Point::new(1, 0)..Point::new(2, 0), "".into())
5625 ]
5626 );
5627
5628 for (range, new_text) in edits {
5629 buffer.edit([range], new_text, cx);
5630 }
5631 assert_eq!(
5632 buffer.text(),
5633 "
5634 use a::{b, c};
5635
5636 fn f() {
5637 b();
5638 c();
5639 }
5640 "
5641 .unindent()
5642 );
5643 });
5644 }
5645
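    /// Collects the buffer's chunks over `range`, coalescing adjacent chunks that
    /// share the same diagnostic severity, so tests can assert on highlighted spans.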
5646 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5647 buffer: &Buffer,
5648 range: Range<T>,
5649 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5650 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5651 for chunk in buffer.snapshot().chunks(range, true) {
5652 if chunks.last().map_or(false, |prev_chunk| {
5653 prev_chunk.1 == chunk.diagnostic_severity
5654 }) {
5655 chunks.last_mut().unwrap().0.push_str(chunk.text);
5656 } else {
5657 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5658 }
5659 }
5660 chunks
5661 }
5662
5663 #[gpui::test]
5664 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5665 let dir = temp_tree(json!({
5666 "root": {
5667 "dir1": {},
5668 "dir2": {
5669 "dir3": {}
5670 }
5671 }
5672 }));
5673
5674 let project = Project::test(Arc::new(RealFs), cx);
5675 let (tree, _) = project
5676 .update(cx, |project, cx| {
5677 project.find_or_create_local_worktree(&dir.path(), true, cx)
5678 })
5679 .await
5680 .unwrap();
5681
5682 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5683 .await;
5684
5685 let cancel_flag = Default::default();
5686 let results = project
5687 .read_with(cx, |project, cx| {
5688 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5689 })
5690 .await;
5691
5692 assert!(results.is_empty());
5693 }
5694
5695 #[gpui::test]
5696 async fn test_definition(cx: &mut gpui::TestAppContext) {
5697 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5698 let language = Arc::new(Language::new(
5699 LanguageConfig {
5700 name: "Rust".into(),
5701 path_suffixes: vec!["rs".to_string()],
5702 language_server: Some(language_server_config),
5703 ..Default::default()
5704 },
5705 Some(tree_sitter_rust::language()),
5706 ));
5707
5708 let fs = FakeFs::new(cx.background());
5709 fs.insert_tree(
5710 "/dir",
5711 json!({
5712 "a.rs": "const fn a() { A }",
5713 "b.rs": "const y: i32 = crate::a()",
5714 }),
5715 )
5716 .await;
5717
5718 let project = Project::test(fs, cx);
5719 project.update(cx, |project, _| {
5720 Arc::get_mut(&mut project.languages).unwrap().add(language);
5721 });
5722
5723 let (tree, _) = project
5724 .update(cx, |project, cx| {
5725 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5726 })
5727 .await
5728 .unwrap();
5729 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5730 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5731 .await;
5732
5733 let buffer = project
5734 .update(cx, |project, cx| {
5735 project.open_buffer(
5736 ProjectPath {
5737 worktree_id,
5738 path: Path::new("").into(),
5739 },
5740 cx,
5741 )
5742 })
5743 .await
5744 .unwrap();
5745
5746 let mut fake_server = fake_servers.next().await.unwrap();
5747 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5748 let params = params.text_document_position_params;
5749 assert_eq!(
5750 params.text_document.uri.to_file_path().unwrap(),
5751 Path::new("/dir/b.rs"),
5752 );
5753 assert_eq!(params.position, lsp::Position::new(0, 22));
5754
5755 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5756 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5757 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5758 )))
5759 });
5760
5761 let mut definitions = project
5762 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5763 .await
5764 .unwrap();
5765
5766 assert_eq!(definitions.len(), 1);
5767 let definition = definitions.pop().unwrap();
5768 cx.update(|cx| {
5769 let target_buffer = definition.buffer.read(cx);
5770 assert_eq!(
5771 target_buffer
5772 .file()
5773 .unwrap()
5774 .as_local()
5775 .unwrap()
5776 .abs_path(cx),
5777 Path::new("/dir/a.rs"),
5778 );
5779 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5780 assert_eq!(
5781 list_worktrees(&project, cx),
5782 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5783 );
5784
5785 drop(definition);
5786 });
5787 cx.read(|cx| {
5788 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5789 });
5790
5791 fn list_worktrees<'a>(
5792 project: &'a ModelHandle<Project>,
5793 cx: &'a AppContext,
5794 ) -> Vec<(&'a Path, bool)> {
5795 project
5796 .read(cx)
5797 .worktrees(cx)
5798 .map(|worktree| {
5799 let worktree = worktree.read(cx);
5800 (
5801 worktree.as_local().unwrap().abs_path().as_ref(),
5802 worktree.is_visible(),
5803 )
5804 })
5805 .collect::<Vec<_>>()
5806 }
5807 }
5808
5809 #[gpui::test]
5810 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5811 let fs = FakeFs::new(cx.background());
5812 fs.insert_tree(
5813 "/dir",
5814 json!({
5815 "file1": "the old contents",
5816 }),
5817 )
5818 .await;
5819
5820 let project = Project::test(fs.clone(), cx);
5821 let worktree_id = project
5822 .update(cx, |p, cx| {
5823 p.find_or_create_local_worktree("/dir", true, cx)
5824 })
5825 .await
5826 .unwrap()
5827 .0
5828 .read_with(cx, |tree, _| tree.id());
5829
5830 let buffer = project
5831 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5832 .await
5833 .unwrap();
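        // Make a large edit (~160 KB of repeated text) and save; the file on disk should match the buffer afterwards.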
5834 buffer
5835 .update(cx, |buffer, cx| {
5836 assert_eq!(buffer.text(), "the old contents");
5837 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5838 buffer.save(cx)
5839 })
5840 .await
5841 .unwrap();
5842
5843 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5844 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5845 }
5846
5847 #[gpui::test]
5848 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5849 let fs = FakeFs::new(cx.background());
5850 fs.insert_tree(
5851 "/dir",
5852 json!({
5853 "file1": "the old contents",
5854 }),
5855 )
5856 .await;
5857
5858 let project = Project::test(fs.clone(), cx);
5859 let worktree_id = project
5860 .update(cx, |p, cx| {
5861 p.find_or_create_local_worktree("/dir/file1", true, cx)
5862 })
5863 .await
5864 .unwrap()
5865 .0
5866 .read_with(cx, |tree, _| tree.id());
5867
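        // In a single-file worktree, the file is opened via an empty relative path.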
5868 let buffer = project
5869 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5870 .await
5871 .unwrap();
5872 buffer
5873 .update(cx, |buffer, cx| {
5874 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5875 buffer.save(cx)
5876 })
5877 .await
5878 .unwrap();
5879
5880 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5881 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5882 }
5883
5884 #[gpui::test]
5885 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5886 let fs = FakeFs::new(cx.background());
5887 fs.insert_tree("/dir", json!({})).await;
5888
5889 let project = Project::test(fs.clone(), cx);
5890 let (worktree, _) = project
5891 .update(cx, |project, cx| {
5892 project.find_or_create_local_worktree("/dir", true, cx)
5893 })
5894 .await
5895 .unwrap();
5896 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5897
5898 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5899 buffer.update(cx, |buffer, cx| {
5900 buffer.edit([0..0], "abc", cx);
5901 assert!(buffer.is_dirty());
5902 assert!(!buffer.has_conflict());
5903 });
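        // Save the untitled buffer under a path inside the worktree.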
5904 project
5905 .update(cx, |project, cx| {
5906 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5907 })
5908 .await
5909 .unwrap();
5910 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5911 buffer.read_with(cx, |buffer, cx| {
5912 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5913 assert!(!buffer.is_dirty());
5914 assert!(!buffer.has_conflict());
5915 });
5916
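        // Re-opening the newly saved path returns the existing buffer rather than creating a new one.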
5917 let opened_buffer = project
5918 .update(cx, |project, cx| {
5919 project.open_buffer((worktree_id, "file1"), cx)
5920 })
5921 .await
5922 .unwrap();
5923 assert_eq!(opened_buffer, buffer);
5924 }
5925
5926 #[gpui::test(retries = 5)]
5927 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5928 let dir = temp_tree(json!({
5929 "a": {
5930 "file1": "",
5931 "file2": "",
5932 "file3": "",
5933 },
5934 "b": {
5935 "c": {
5936 "file4": "",
5937 "file5": "",
5938 }
5939 }
5940 }));
5941
5942 let project = Project::test(Arc::new(RealFs), cx);
5943 let rpc = project.read_with(cx, |p, _| p.client.clone());
5944
5945 let (tree, _) = project
5946 .update(cx, |p, cx| {
5947 p.find_or_create_local_worktree(dir.path(), true, cx)
5948 })
5949 .await
5950 .unwrap();
5951 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5952
5953 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5954 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5955 async move { buffer.await.unwrap() }
5956 };
5957 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5958 tree.read_with(cx, |tree, _| {
5959 tree.entry_for_path(path)
5960 .expect(&format!("no entry for path {}", path))
5961 .id
5962 })
5963 };
5964
5965 let buffer2 = buffer_for_path("a/file2", cx).await;
5966 let buffer3 = buffer_for_path("a/file3", cx).await;
5967 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5968 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5969
5970 let file2_id = id_for_path("a/file2", &cx);
5971 let file3_id = id_for_path("a/file3", &cx);
5972 let file4_id = id_for_path("b/c/file4", &cx);
5973
5974 // Wait for the initial scan.
5975 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5976 .await;
5977
5978 // Create a remote copy of this worktree.
5979 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5980 let (remote, load_task) = cx.update(|cx| {
5981 Worktree::remote(
5982 1,
5983 1,
5984 initial_snapshot.to_proto(&Default::default(), true),
5985 rpc.clone(),
5986 cx,
5987 )
5988 });
5989 load_task.await;
5990
5991 cx.read(|cx| {
5992 assert!(!buffer2.read(cx).is_dirty());
5993 assert!(!buffer3.read(cx).is_dirty());
5994 assert!(!buffer4.read(cx).is_dirty());
5995 assert!(!buffer5.read(cx).is_dirty());
5996 });
5997
5998 // Rename and delete files and directories.
5999 tree.flush_fs_events(&cx).await;
6000 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6001 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6002 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6003 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6004 tree.flush_fs_events(&cx).await;
6005
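        // Renamed files keep their entry ids, open buffers follow their files to
        // their new paths, and the buffer for the deleted file is marked as deleted.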
6006 let expected_paths = vec![
6007 "a",
6008 "a/file1",
6009 "a/file2.new",
6010 "b",
6011 "d",
6012 "d/file3",
6013 "d/file4",
6014 ];
6015
6016 cx.read(|app| {
6017 assert_eq!(
6018 tree.read(app)
6019 .paths()
6020 .map(|p| p.to_str().unwrap())
6021 .collect::<Vec<_>>(),
6022 expected_paths
6023 );
6024
6025 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6026 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6027 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6028
6029 assert_eq!(
6030 buffer2.read(app).file().unwrap().path().as_ref(),
6031 Path::new("a/file2.new")
6032 );
6033 assert_eq!(
6034 buffer3.read(app).file().unwrap().path().as_ref(),
6035 Path::new("d/file3")
6036 );
6037 assert_eq!(
6038 buffer4.read(app).file().unwrap().path().as_ref(),
6039 Path::new("d/file4")
6040 );
6041 assert_eq!(
6042 buffer5.read(app).file().unwrap().path().as_ref(),
6043 Path::new("b/c/file5")
6044 );
6045
6046 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6047 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6048 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6049 assert!(buffer5.read(app).file().unwrap().is_deleted());
6050 });
6051
6052 // Update the remote worktree. Check that it becomes consistent with the
6053 // local worktree.
6054 remote.update(cx, |remote, cx| {
6055 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6056 &initial_snapshot,
6057 1,
6058 1,
6059 true,
6060 );
6061 remote
6062 .as_remote_mut()
6063 .unwrap()
6064 .snapshot
6065 .apply_remote_update(update_message)
6066 .unwrap();
6067
6068 assert_eq!(
6069 remote
6070 .paths()
6071 .map(|p| p.to_str().unwrap())
6072 .collect::<Vec<_>>(),
6073 expected_paths
6074 );
6075 });
6076 }
6077
6078 #[gpui::test]
6079 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6080 let fs = FakeFs::new(cx.background());
6081 fs.insert_tree(
6082 "/the-dir",
6083 json!({
6084 "a.txt": "a-contents",
6085 "b.txt": "b-contents",
6086 }),
6087 )
6088 .await;
6089
6090 let project = Project::test(fs.clone(), cx);
6091 let worktree_id = project
6092 .update(cx, |p, cx| {
6093 p.find_or_create_local_worktree("/the-dir", true, cx)
6094 })
6095 .await
6096 .unwrap()
6097 .0
6098 .read_with(cx, |tree, _| tree.id());
6099
6100 // Spawn multiple tasks to open paths, repeating some paths.
6101 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6102 (
6103 p.open_buffer((worktree_id, "a.txt"), cx),
6104 p.open_buffer((worktree_id, "b.txt"), cx),
6105 p.open_buffer((worktree_id, "a.txt"), cx),
6106 )
6107 });
6108
6109 let buffer_a_1 = buffer_a_1.await.unwrap();
6110 let buffer_a_2 = buffer_a_2.await.unwrap();
6111 let buffer_b = buffer_b.await.unwrap();
6112 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6113 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6114
6115 // There is only one buffer per path.
6116 let buffer_a_id = buffer_a_1.id();
6117 assert_eq!(buffer_a_2.id(), buffer_a_id);
6118
6119 // Open the same path again while it is still open.
6120 drop(buffer_a_1);
6121 let buffer_a_3 = project
6122 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6123 .await
6124 .unwrap();
6125
6126 // There's still only one buffer per path.
6127 assert_eq!(buffer_a_3.id(), buffer_a_id);
6128 }
6129
6130 #[gpui::test]
6131 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6132 use std::fs;
6133
6134 let dir = temp_tree(json!({
6135 "file1": "abc",
6136 "file2": "def",
6137 "file3": "ghi",
6138 }));
6139
6140 let project = Project::test(Arc::new(RealFs), cx);
6141 let (worktree, _) = project
6142 .update(cx, |p, cx| {
6143 p.find_or_create_local_worktree(dir.path(), true, cx)
6144 })
6145 .await
6146 .unwrap();
6147 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6148
6149 worktree.flush_fs_events(&cx).await;
6150 worktree
6151 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6152 .await;
6153
6154 let buffer1 = project
6155 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6156 .await
6157 .unwrap();
6158 let events = Rc::new(RefCell::new(Vec::new()));
6159
6160 // initially, the buffer isn't dirty.
6161 buffer1.update(cx, |buffer, cx| {
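            // Record every buffer event except operations.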
6162 cx.subscribe(&buffer1, {
6163 let events = events.clone();
6164 move |_, _, event, _| match event {
6165 BufferEvent::Operation(_) => {}
6166 _ => events.borrow_mut().push(event.clone()),
6167 }
6168 })
6169 .detach();
6170
6171 assert!(!buffer.is_dirty());
6172 assert!(events.borrow().is_empty());
6173
6174 buffer.edit(vec![1..2], "", cx);
6175 });
6176
6177 // after the first edit, the buffer is dirty, and emits a dirtied event.
6178 buffer1.update(cx, |buffer, cx| {
6179            assert_eq!(buffer.text(), "ac");
6180 assert!(buffer.is_dirty());
6181 assert_eq!(
6182 *events.borrow(),
6183 &[language::Event::Edited, language::Event::Dirtied]
6184 );
6185 events.borrow_mut().clear();
6186 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6187 });
6188
6189 // after saving, the buffer is not dirty, and emits a saved event.
6190 buffer1.update(cx, |buffer, cx| {
6191 assert!(!buffer.is_dirty());
6192 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6193 events.borrow_mut().clear();
6194
6195 buffer.edit(vec![1..1], "B", cx);
6196 buffer.edit(vec![2..2], "D", cx);
6197 });
6198
6199 // after editing again, the buffer is dirty, and emits another dirty event.
6200 buffer1.update(cx, |buffer, cx| {
6201            assert_eq!(buffer.text(), "aBDc");
6202 assert!(buffer.is_dirty());
6203 assert_eq!(
6204 *events.borrow(),
6205 &[
6206 language::Event::Edited,
6207 language::Event::Dirtied,
6208 language::Event::Edited,
6209 ],
6210 );
6211 events.borrow_mut().clear();
6212
6213 // TODO - currently, after restoring the buffer to its
6214            // previously-saved state, the buffer is still considered dirty.
6215 buffer.edit([1..3], "", cx);
6216            assert_eq!(buffer.text(), "ac");
6217 assert!(buffer.is_dirty());
6218 });
6219
6220 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6221
6222 // When a file is deleted, the buffer is considered dirty.
6223 let events = Rc::new(RefCell::new(Vec::new()));
6224 let buffer2 = project
6225 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6226 .await
6227 .unwrap();
6228 buffer2.update(cx, |_, cx| {
6229 cx.subscribe(&buffer2, {
6230 let events = events.clone();
6231 move |_, _, event, _| events.borrow_mut().push(event.clone())
6232 })
6233 .detach();
6234 });
6235
6236 fs::remove_file(dir.path().join("file2")).unwrap();
6237 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6238 assert_eq!(
6239 *events.borrow(),
6240 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6241 );
6242
6243        // When a file that is already dirty is deleted, no additional Dirtied event is emitted.
6244 let events = Rc::new(RefCell::new(Vec::new()));
6245 let buffer3 = project
6246 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6247 .await
6248 .unwrap();
6249 buffer3.update(cx, |_, cx| {
6250 cx.subscribe(&buffer3, {
6251 let events = events.clone();
6252 move |_, _, event, _| events.borrow_mut().push(event.clone())
6253 })
6254 .detach();
6255 });
6256
6257 worktree.flush_fs_events(&cx).await;
6258 buffer3.update(cx, |buffer, cx| {
6259 buffer.edit(Some(0..0), "x", cx);
6260 });
6261 events.borrow_mut().clear();
6262 fs::remove_file(dir.path().join("file3")).unwrap();
6263 buffer3
6264 .condition(&cx, |_, _| !events.borrow().is_empty())
6265 .await;
6266 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6267 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6268 }
6269
6270 #[gpui::test]
6271 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6272 use std::fs;
6273
6274 let initial_contents = "aaa\nbbbbb\nc\n";
6275 let dir = temp_tree(json!({ "the-file": initial_contents }));
6276
6277 let project = Project::test(Arc::new(RealFs), cx);
6278 let (worktree, _) = project
6279 .update(cx, |p, cx| {
6280 p.find_or_create_local_worktree(dir.path(), true, cx)
6281 })
6282 .await
6283 .unwrap();
6284 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6285
6286 worktree
6287 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6288 .await;
6289
6290 let abs_path = dir.path().join("the-file");
6291 let buffer = project
6292 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6293 .await
6294 .unwrap();
6295
6296 // TODO
6297 // Add a cursor on each row.
6298 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6299 // assert!(!buffer.is_dirty());
6300 // buffer.add_selection_set(
6301 // &(0..3)
6302 // .map(|row| Selection {
6303 // id: row as usize,
6304 // start: Point::new(row, 1),
6305 // end: Point::new(row, 1),
6306 // reversed: false,
6307 // goal: SelectionGoal::None,
6308 // })
6309 // .collect::<Vec<_>>(),
6310 // cx,
6311 // )
6312 // });
6313
6314 // Change the file on disk, adding two new lines of text, and removing
6315 // one line.
6316 buffer.read_with(cx, |buffer, _| {
6317 assert!(!buffer.is_dirty());
6318 assert!(!buffer.has_conflict());
6319 });
6320 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6321 fs::write(&abs_path, new_contents).unwrap();
6322
6323 // Because the buffer was not modified, it is reloaded from disk. Its
6324 // contents are edited according to the diff between the old and new
6325 // file contents.
6326 buffer
6327 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6328 .await;
6329
6330 buffer.update(cx, |buffer, _| {
6331 assert_eq!(buffer.text(), new_contents);
6332 assert!(!buffer.is_dirty());
6333 assert!(!buffer.has_conflict());
6334
6335 // TODO
6336 // let cursor_positions = buffer
6337 // .selection_set(selection_set_id)
6338 // .unwrap()
6339 // .selections::<Point>(&*buffer)
6340 // .map(|selection| {
6341 // assert_eq!(selection.start, selection.end);
6342 // selection.start
6343 // })
6344 // .collect::<Vec<_>>();
6345 // assert_eq!(
6346 // cursor_positions,
6347 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6348 // );
6349 });
6350
6351 // Modify the buffer
6352 buffer.update(cx, |buffer, cx| {
6353 buffer.edit(vec![0..0], " ", cx);
6354 assert!(buffer.is_dirty());
6355 assert!(!buffer.has_conflict());
6356 });
6357
6358 // Change the file on disk again, adding blank lines to the beginning.
6359 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6360
6361 // Because the buffer is modified, it doesn't reload from disk, but is
6362 // marked as having a conflict.
6363 buffer
6364 .condition(&cx, |buffer, _| buffer.has_conflict())
6365 .await;
6366 }
6367
6368 #[gpui::test]
6369 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6370 cx.foreground().forbid_parking();
6371
6372 let fs = FakeFs::new(cx.background());
6373 fs.insert_tree(
6374 "/the-dir",
6375 json!({
6376 "a.rs": "
6377 fn foo(mut v: Vec<usize>) {
6378 for x in &v {
6379 v.push(1);
6380 }
6381 }
6382 "
6383 .unindent(),
6384 }),
6385 )
6386 .await;
6387
6388 let project = Project::test(fs.clone(), cx);
6389 let (worktree, _) = project
6390 .update(cx, |p, cx| {
6391 p.find_or_create_local_worktree("/the-dir", true, cx)
6392 })
6393 .await
6394 .unwrap();
6395 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6396
6397 let buffer = project
6398 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6399 .await
6400 .unwrap();
6401
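        // Publish a warning and an error, each accompanied by hint diagnostics that
        // refer back to their primary via relatedInformation, so the project can
        // group the related entries together.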
6402 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6403 let message = lsp::PublishDiagnosticsParams {
6404 uri: buffer_uri.clone(),
6405 diagnostics: vec![
6406 lsp::Diagnostic {
6407 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6408 severity: Some(DiagnosticSeverity::WARNING),
6409 message: "error 1".to_string(),
6410 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6411 location: lsp::Location {
6412 uri: buffer_uri.clone(),
6413 range: lsp::Range::new(
6414 lsp::Position::new(1, 8),
6415 lsp::Position::new(1, 9),
6416 ),
6417 },
6418 message: "error 1 hint 1".to_string(),
6419 }]),
6420 ..Default::default()
6421 },
6422 lsp::Diagnostic {
6423 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6424 severity: Some(DiagnosticSeverity::HINT),
6425 message: "error 1 hint 1".to_string(),
6426 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6427 location: lsp::Location {
6428 uri: buffer_uri.clone(),
6429 range: lsp::Range::new(
6430 lsp::Position::new(1, 8),
6431 lsp::Position::new(1, 9),
6432 ),
6433 },
6434 message: "original diagnostic".to_string(),
6435 }]),
6436 ..Default::default()
6437 },
6438 lsp::Diagnostic {
6439 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6440 severity: Some(DiagnosticSeverity::ERROR),
6441 message: "error 2".to_string(),
6442 related_information: Some(vec![
6443 lsp::DiagnosticRelatedInformation {
6444 location: lsp::Location {
6445 uri: buffer_uri.clone(),
6446 range: lsp::Range::new(
6447 lsp::Position::new(1, 13),
6448 lsp::Position::new(1, 15),
6449 ),
6450 },
6451 message: "error 2 hint 1".to_string(),
6452 },
6453 lsp::DiagnosticRelatedInformation {
6454 location: lsp::Location {
6455 uri: buffer_uri.clone(),
6456 range: lsp::Range::new(
6457 lsp::Position::new(1, 13),
6458 lsp::Position::new(1, 15),
6459 ),
6460 },
6461 message: "error 2 hint 2".to_string(),
6462 },
6463 ]),
6464 ..Default::default()
6465 },
6466 lsp::Diagnostic {
6467 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6468 severity: Some(DiagnosticSeverity::HINT),
6469 message: "error 2 hint 1".to_string(),
6470 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6471 location: lsp::Location {
6472 uri: buffer_uri.clone(),
6473 range: lsp::Range::new(
6474 lsp::Position::new(2, 8),
6475 lsp::Position::new(2, 17),
6476 ),
6477 },
6478 message: "original diagnostic".to_string(),
6479 }]),
6480 ..Default::default()
6481 },
6482 lsp::Diagnostic {
6483 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6484 severity: Some(DiagnosticSeverity::HINT),
6485 message: "error 2 hint 2".to_string(),
6486 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6487 location: lsp::Location {
6488 uri: buffer_uri.clone(),
6489 range: lsp::Range::new(
6490 lsp::Position::new(2, 8),
6491 lsp::Position::new(2, 17),
6492 ),
6493 },
6494 message: "original diagnostic".to_string(),
6495 }]),
6496 ..Default::default()
6497 },
6498 ],
6499 version: None,
6500 };
6501
6502 project
6503 .update(cx, |p, cx| {
6504 p.update_diagnostics(message, &Default::default(), cx)
6505 })
6506 .unwrap();
6507 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6508
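        // All five diagnostics are present, ordered by position, and each hint
        // carries the group id of its primary diagnostic.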
6509 assert_eq!(
6510 buffer
6511 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6512 .collect::<Vec<_>>(),
6513 &[
6514 DiagnosticEntry {
6515 range: Point::new(1, 8)..Point::new(1, 9),
6516 diagnostic: Diagnostic {
6517 severity: DiagnosticSeverity::WARNING,
6518 message: "error 1".to_string(),
6519 group_id: 0,
6520 is_primary: true,
6521 ..Default::default()
6522 }
6523 },
6524 DiagnosticEntry {
6525 range: Point::new(1, 8)..Point::new(1, 9),
6526 diagnostic: Diagnostic {
6527 severity: DiagnosticSeverity::HINT,
6528 message: "error 1 hint 1".to_string(),
6529 group_id: 0,
6530 is_primary: false,
6531 ..Default::default()
6532 }
6533 },
6534 DiagnosticEntry {
6535 range: Point::new(1, 13)..Point::new(1, 15),
6536 diagnostic: Diagnostic {
6537 severity: DiagnosticSeverity::HINT,
6538 message: "error 2 hint 1".to_string(),
6539 group_id: 1,
6540 is_primary: false,
6541 ..Default::default()
6542 }
6543 },
6544 DiagnosticEntry {
6545 range: Point::new(1, 13)..Point::new(1, 15),
6546 diagnostic: Diagnostic {
6547 severity: DiagnosticSeverity::HINT,
6548 message: "error 2 hint 2".to_string(),
6549 group_id: 1,
6550 is_primary: false,
6551 ..Default::default()
6552 }
6553 },
6554 DiagnosticEntry {
6555 range: Point::new(2, 8)..Point::new(2, 17),
6556 diagnostic: Diagnostic {
6557 severity: DiagnosticSeverity::ERROR,
6558 message: "error 2".to_string(),
6559 group_id: 1,
6560 is_primary: true,
6561 ..Default::default()
6562 }
6563 }
6564 ]
6565 );
6566
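        // Fetching a group by id returns the primary diagnostic together with its hints.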
6567 assert_eq!(
6568 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6569 &[
6570 DiagnosticEntry {
6571 range: Point::new(1, 8)..Point::new(1, 9),
6572 diagnostic: Diagnostic {
6573 severity: DiagnosticSeverity::WARNING,
6574 message: "error 1".to_string(),
6575 group_id: 0,
6576 is_primary: true,
6577 ..Default::default()
6578 }
6579 },
6580 DiagnosticEntry {
6581 range: Point::new(1, 8)..Point::new(1, 9),
6582 diagnostic: Diagnostic {
6583 severity: DiagnosticSeverity::HINT,
6584 message: "error 1 hint 1".to_string(),
6585 group_id: 0,
6586 is_primary: false,
6587 ..Default::default()
6588 }
6589 },
6590 ]
6591 );
6592 assert_eq!(
6593 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6594 &[
6595 DiagnosticEntry {
6596 range: Point::new(1, 13)..Point::new(1, 15),
6597 diagnostic: Diagnostic {
6598 severity: DiagnosticSeverity::HINT,
6599 message: "error 2 hint 1".to_string(),
6600 group_id: 1,
6601 is_primary: false,
6602 ..Default::default()
6603 }
6604 },
6605 DiagnosticEntry {
6606 range: Point::new(1, 13)..Point::new(1, 15),
6607 diagnostic: Diagnostic {
6608 severity: DiagnosticSeverity::HINT,
6609 message: "error 2 hint 2".to_string(),
6610 group_id: 1,
6611 is_primary: false,
6612 ..Default::default()
6613 }
6614 },
6615 DiagnosticEntry {
6616 range: Point::new(2, 8)..Point::new(2, 17),
6617 diagnostic: Diagnostic {
6618 severity: DiagnosticSeverity::ERROR,
6619 message: "error 2".to_string(),
6620 group_id: 1,
6621 is_primary: true,
6622 ..Default::default()
6623 }
6624 }
6625 ]
6626 );
6627 }
6628
6629 #[gpui::test]
6630 async fn test_rename(cx: &mut gpui::TestAppContext) {
6631 cx.foreground().forbid_parking();
6632
6633 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6634 let language = Arc::new(Language::new(
6635 LanguageConfig {
6636 name: "Rust".into(),
6637 path_suffixes: vec!["rs".to_string()],
6638 language_server: Some(language_server_config),
6639 ..Default::default()
6640 },
6641 Some(tree_sitter_rust::language()),
6642 ));
6643
6644 let fs = FakeFs::new(cx.background());
6645 fs.insert_tree(
6646 "/dir",
6647 json!({
6648 "one.rs": "const ONE: usize = 1;",
6649 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6650 }),
6651 )
6652 .await;
6653
6654 let project = Project::test(fs.clone(), cx);
6655 project.update(cx, |project, _| {
6656 Arc::get_mut(&mut project.languages).unwrap().add(language);
6657 });
6658
6659 let (tree, _) = project
6660 .update(cx, |project, cx| {
6661 project.find_or_create_local_worktree("/dir", true, cx)
6662 })
6663 .await
6664 .unwrap();
6665 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6666 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6667 .await;
6668
6669 let buffer = project
6670 .update(cx, |project, cx| {
6671 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6672 })
6673 .await
6674 .unwrap();
6675
6676 let mut fake_server = fake_servers.next().await.unwrap();
6677
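        // prepare_rename queries the server for the range of the symbol under the
        // cursor; the fake server reports the range of `ONE`.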
6678 let response = project.update(cx, |project, cx| {
6679 project.prepare_rename(buffer.clone(), 7, cx)
6680 });
6681 fake_server
6682 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6683 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6684 assert_eq!(params.position, lsp::Position::new(0, 7));
6685 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6686 lsp::Position::new(0, 6),
6687 lsp::Position::new(0, 9),
6688 )))
6689 })
6690 .next()
6691 .await
6692 .unwrap();
6693 let range = response.await.unwrap().unwrap();
6694 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6695 assert_eq!(range, 6..9);
6696
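        // perform_rename applies the server's workspace edit, which touches both
        // one.rs and the not-yet-opened two.rs.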
6697 let response = project.update(cx, |project, cx| {
6698 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6699 });
6700 fake_server
6701 .handle_request::<lsp::request::Rename, _>(|params, _| {
6702 assert_eq!(
6703 params.text_document_position.text_document.uri.as_str(),
6704 "file:///dir/one.rs"
6705 );
6706 assert_eq!(
6707 params.text_document_position.position,
6708 lsp::Position::new(0, 7)
6709 );
6710 assert_eq!(params.new_name, "THREE");
6711 Some(lsp::WorkspaceEdit {
6712 changes: Some(
6713 [
6714 (
6715 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6716 vec![lsp::TextEdit::new(
6717 lsp::Range::new(
6718 lsp::Position::new(0, 6),
6719 lsp::Position::new(0, 9),
6720 ),
6721 "THREE".to_string(),
6722 )],
6723 ),
6724 (
6725 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6726 vec![
6727 lsp::TextEdit::new(
6728 lsp::Range::new(
6729 lsp::Position::new(0, 24),
6730 lsp::Position::new(0, 27),
6731 ),
6732 "THREE".to_string(),
6733 ),
6734 lsp::TextEdit::new(
6735 lsp::Range::new(
6736 lsp::Position::new(0, 35),
6737 lsp::Position::new(0, 38),
6738 ),
6739 "THREE".to_string(),
6740 ),
6741 ],
6742 ),
6743 ]
6744 .into_iter()
6745 .collect(),
6746 ),
6747 ..Default::default()
6748 })
6749 })
6750 .next()
6751 .await
6752 .unwrap();
6753 let mut transaction = response.await.unwrap().0;
6754 assert_eq!(transaction.len(), 2);
6755 assert_eq!(
6756 transaction
6757 .remove_entry(&buffer)
6758 .unwrap()
6759 .0
6760 .read_with(cx, |buffer, _| buffer.text()),
6761 "const THREE: usize = 1;"
6762 );
6763 assert_eq!(
6764 transaction
6765 .into_keys()
6766 .next()
6767 .unwrap()
6768 .read_with(cx, |buffer, _| buffer.text()),
6769 "const TWO: usize = one::THREE + one::THREE;"
6770 );
6771 }
6772
6773 #[gpui::test]
6774 async fn test_search(cx: &mut gpui::TestAppContext) {
6775 let fs = FakeFs::new(cx.background());
6776 fs.insert_tree(
6777 "/dir",
6778 json!({
6779 "one.rs": "const ONE: usize = 1;",
6780 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6781 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6782 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6783 }),
6784 )
6785 .await;
6786 let project = Project::test(fs.clone(), cx);
6787 let (tree, _) = project
6788 .update(cx, |project, cx| {
6789 project.find_or_create_local_worktree("/dir", true, cx)
6790 })
6791 .await
6792 .unwrap();
6793 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6794 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6795 .await;
6796
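        // With no buffers open, the search reflects the contents of the files on disk.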
6797 assert_eq!(
6798 search(&project, SearchQuery::text("TWO", false, true), cx)
6799 .await
6800 .unwrap(),
6801 HashMap::from_iter([
6802 ("two.rs".to_string(), vec![6..9]),
6803 ("three.rs".to_string(), vec![37..40])
6804 ])
6805 );
6806
6807 let buffer_4 = project
6808 .update(cx, |project, cx| {
6809 project.open_buffer((worktree_id, "four.rs"), cx)
6810 })
6811 .await
6812 .unwrap();
6813 buffer_4.update(cx, |buffer, cx| {
6814 buffer.edit([20..28, 31..43], "two::TWO", cx);
6815 });
6816
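        // The search picks up the unsaved edits in four.rs as well as the on-disk
        // contents of the other files.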
6817 assert_eq!(
6818 search(&project, SearchQuery::text("TWO", false, true), cx)
6819 .await
6820 .unwrap(),
6821 HashMap::from_iter([
6822 ("two.rs".to_string(), vec![6..9]),
6823 ("three.rs".to_string(), vec![37..40]),
6824 ("four.rs".to_string(), vec![25..28, 36..39])
6825 ])
6826 );
6827
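        // Runs a project-wide search and converts the resulting anchor ranges into
        // offset ranges keyed by file path.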
6828 async fn search(
6829 project: &ModelHandle<Project>,
6830 query: SearchQuery,
6831 cx: &mut gpui::TestAppContext,
6832 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6833 let results = project
6834 .update(cx, |project, cx| project.search(query, cx))
6835 .await?;
6836
6837 Ok(results
6838 .into_iter()
6839 .map(|(buffer, ranges)| {
6840 buffer.read_with(cx, |buffer, _| {
6841 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6842 let ranges = ranges
6843 .into_iter()
6844 .map(|range| range.to_offset(buffer))
6845 .collect::<Vec<_>>();
6846 (path, ranges)
6847 })
6848 })
6849 .collect())
6850 }
6851 }
6852}