1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToLspPosition,
22 ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
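/// An entity that can be tracked by a project and, optionally, associated with
/// an entry in one of its worktrees.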
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
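/// The state for a single open project: the worktrees being edited, the open
/// buffers, the language servers that have been started for them, and the
/// collaboration state (either a local project that may be shared, or a remote
/// project joined as a guest).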
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
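/// How the project is currently holding an open buffer: strongly (kept alive
/// while the project is shared or remote), weakly (allowed to drop once all
/// external handles are gone), or still loading (operations received before
/// the buffer arrives are queued and applied once it does).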
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
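/// Whether this project lives on the local machine (and may be shared with
/// guests) or is a remote project that this client has joined as a guest.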
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128 CollaboratorLeft(PeerId),
129}
130
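/// Notifications forwarded from a language server's handlers onto the main
/// thread, where they are processed by `on_lsp_event`.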
131enum LanguageServerEvent {
132 WorkStart {
133 token: String,
134 },
135 WorkProgress {
136 token: String,
137 progress: LanguageServerProgress,
138 },
139 WorkEnd {
140 token: String,
141 },
142 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
143}
144
145pub struct LanguageServerStatus {
146 pub name: String,
147 pub pending_work: BTreeMap<String, LanguageServerProgress>,
148 pending_diagnostic_updates: isize,
149}
150
151#[derive(Clone, Debug)]
152pub struct LanguageServerProgress {
153 pub message: Option<String>,
154 pub percentage: Option<usize>,
155 pub last_update_at: Instant,
156}
157
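/// A path to a file or directory, relative to the root of the worktree
/// identified by `worktree_id`.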
158#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
159pub struct ProjectPath {
160 pub worktree_id: WorktreeId,
161 pub path: Arc<Path>,
162}
163
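/// Counts of the primary diagnostics reported for a single path, bucketed by
/// severity.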
164#[derive(Clone, Debug, Default, PartialEq)]
165pub struct DiagnosticSummary {
166 pub error_count: usize,
167 pub warning_count: usize,
168 pub info_count: usize,
169 pub hint_count: usize,
170}
171
172#[derive(Debug)]
173pub struct Location {
174 pub buffer: ModelHandle<Buffer>,
175 pub range: Range<language::Anchor>,
176}
177
178#[derive(Debug)]
179pub struct DocumentHighlight {
180 pub range: Range<language::Anchor>,
181 pub kind: DocumentHighlightKind,
182}
183
184#[derive(Clone, Debug)]
185pub struct Symbol {
186 pub source_worktree_id: WorktreeId,
187 pub worktree_id: WorktreeId,
188 pub language_name: String,
189 pub path: PathBuf,
190 pub label: CodeLabel,
191 pub name: String,
192 pub kind: lsp::SymbolKind,
193 pub range: Range<PointUtf16>,
194 pub signature: [u8; 32],
195}
196
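/// The buffer transactions produced by a single project-wide operation (such
/// as formatting), keyed by the buffer they apply to.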
197#[derive(Default)]
198pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
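/// A project-wide identifier for a file or directory entry in a worktree. New
/// ids are allocated from a shared atomic counter so they remain unique across
/// all of the project's worktrees.
///
/// A minimal sketch of how ids are allocated (the counter below is local to
/// the example; the project itself uses its shared `next_entry_id` counter):
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let first = ProjectEntryId::new(&counter); // ProjectEntryId(0)
/// let second = ProjectEntryId::new(&counter); // ProjectEntryId(1)
/// ```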
235#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
236pub struct ProjectEntryId(usize);
237
238impl ProjectEntryId {
239 pub fn new(counter: &AtomicUsize) -> Self {
240 Self(counter.fetch_add(1, SeqCst))
241 }
242
243 pub fn from_proto(id: u64) -> Self {
244 Self(id as usize)
245 }
246
247 pub fn to_proto(&self) -> u64 {
248 self.0 as u64
249 }
250
251 pub fn to_usize(&self) -> usize {
252 self.0
253 }
254}
255
256impl Project {
257 pub fn init(client: &Arc<Client>) {
258 client.add_model_message_handler(Self::handle_add_collaborator);
259 client.add_model_message_handler(Self::handle_buffer_reloaded);
260 client.add_model_message_handler(Self::handle_buffer_saved);
261 client.add_model_message_handler(Self::handle_start_language_server);
262 client.add_model_message_handler(Self::handle_update_language_server);
263 client.add_model_message_handler(Self::handle_remove_collaborator);
264 client.add_model_message_handler(Self::handle_register_worktree);
265 client.add_model_message_handler(Self::handle_unregister_worktree);
266 client.add_model_message_handler(Self::handle_unshare_project);
267 client.add_model_message_handler(Self::handle_update_buffer_file);
268 client.add_model_message_handler(Self::handle_update_buffer);
269 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
270 client.add_model_message_handler(Self::handle_update_worktree);
271 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
272 client.add_model_request_handler(Self::handle_apply_code_action);
273 client.add_model_request_handler(Self::handle_format_buffers);
274 client.add_model_request_handler(Self::handle_get_code_actions);
275 client.add_model_request_handler(Self::handle_get_completions);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
278 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
280 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
281 client.add_model_request_handler(Self::handle_search_project);
282 client.add_model_request_handler(Self::handle_get_project_symbols);
283 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
284 client.add_model_request_handler(Self::handle_open_buffer_by_id);
285 client.add_model_request_handler(Self::handle_open_buffer_by_path);
286 client.add_model_request_handler(Self::handle_save_buffer);
287 }
288
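    /// Creates a new, empty local project backed by the given filesystem, and
    /// keeps its registration with the server in sync with the client's
    /// connection status.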
289 pub fn local(
290 client: Arc<Client>,
291 user_store: ModelHandle<UserStore>,
292 languages: Arc<LanguageRegistry>,
293 fs: Arc<dyn Fs>,
294 cx: &mut MutableAppContext,
295 ) -> ModelHandle<Self> {
296 cx.add_model(|cx: &mut ModelContext<Self>| {
297 let (remote_id_tx, remote_id_rx) = watch::channel();
298 let _maintain_remote_id_task = cx.spawn_weak({
299 let rpc = client.clone();
300 move |this, mut cx| {
301 async move {
302 let mut status = rpc.status();
303 while let Some(status) = status.next().await {
304 if let Some(this) = this.upgrade(&cx) {
305 if status.is_connected() {
306 this.update(&mut cx, |this, cx| this.register(cx)).await?;
307 } else {
308 this.update(&mut cx, |this, cx| this.unregister(cx));
309 }
310 }
311 }
312 Ok(())
313 }
314 .log_err()
315 }
316 });
317
318 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
319 Self {
320 worktrees: Default::default(),
321 collaborators: Default::default(),
322 opened_buffers: Default::default(),
323 shared_buffers: Default::default(),
324 loading_buffers: Default::default(),
325 loading_local_worktrees: Default::default(),
326 buffer_snapshots: Default::default(),
327 client_state: ProjectClientState::Local {
328 is_shared: false,
329 remote_id_tx,
330 remote_id_rx,
331 _maintain_remote_id_task,
332 },
333 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
334 subscriptions: Vec::new(),
335 active_entry: None,
336 languages,
337 client,
338 user_store,
339 fs,
340 next_entry_id: Default::default(),
341 language_servers_with_diagnostics_running: 0,
342 language_servers: Default::default(),
343 started_language_servers: Default::default(),
344 language_server_statuses: Default::default(),
345 language_server_settings: Default::default(),
346 next_language_server_id: 0,
347 nonce: StdRng::from_entropy().gen(),
348 }
349 })
350 }
351
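    /// Joins the project identified by `remote_id` as a guest, replicating its
    /// worktrees, collaborators, and language server statuses locally.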
352 pub async fn remote(
353 remote_id: u64,
354 client: Arc<Client>,
355 user_store: ModelHandle<UserStore>,
356 languages: Arc<LanguageRegistry>,
357 fs: Arc<dyn Fs>,
358 cx: &mut AsyncAppContext,
359 ) -> Result<ModelHandle<Self>> {
360 client.authenticate_and_connect(true, &cx).await?;
361
362 let response = client
363 .request(proto::JoinProject {
364 project_id: remote_id,
365 })
366 .await?;
367
368 let replica_id = response.replica_id as ReplicaId;
369
370 let mut worktrees = Vec::new();
371 for worktree in response.worktrees {
372 let (worktree, load_task) = cx
373 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
374 worktrees.push(worktree);
375 load_task.detach();
376 }
377
378 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
379 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
380 let mut this = Self {
381 worktrees: Vec::new(),
382 loading_buffers: Default::default(),
383 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
384 shared_buffers: Default::default(),
385 loading_local_worktrees: Default::default(),
386 active_entry: None,
387 collaborators: Default::default(),
388 languages,
389 user_store: user_store.clone(),
390 fs,
391 next_entry_id: Default::default(),
392 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
393 client: client.clone(),
394 client_state: ProjectClientState::Remote {
395 sharing_has_stopped: false,
396 remote_id,
397 replica_id,
398 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
399 async move {
400 let mut status = client.status();
401 let is_connected =
402 status.next().await.map_or(false, |s| s.is_connected());
403 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
404 if !is_connected || status.next().await.is_some() {
405 if let Some(this) = this.upgrade(&cx) {
406 this.update(&mut cx, |this, cx| this.project_unshared(cx))
407 }
408 }
409 Ok(())
410 }
411 .log_err()
412 }),
413 },
414 language_servers_with_diagnostics_running: 0,
415 language_servers: Default::default(),
416 started_language_servers: Default::default(),
417 language_server_settings: Default::default(),
418 language_server_statuses: response
419 .language_servers
420 .into_iter()
421 .map(|server| {
422 (
423 server.id as usize,
424 LanguageServerStatus {
425 name: server.name,
426 pending_work: Default::default(),
427 pending_diagnostic_updates: 0,
428 },
429 )
430 })
431 .collect(),
432 next_language_server_id: 0,
433 opened_buffers: Default::default(),
434 buffer_snapshots: Default::default(),
435 nonce: StdRng::from_entropy().gen(),
436 };
437 for worktree in worktrees {
438 this.add_worktree(&worktree, cx);
439 }
440 this
441 });
442
443 let user_ids = response
444 .collaborators
445 .iter()
446 .map(|peer| peer.user_id)
447 .collect();
448 user_store
449 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
450 .await?;
451 let mut collaborators = HashMap::default();
452 for message in response.collaborators {
453 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
454 collaborators.insert(collaborator.peer_id, collaborator);
455 }
456
457 this.update(cx, |this, _| {
458 this.collaborators = collaborators;
459 });
460
461 Ok(this)
462 }
463
464 #[cfg(any(test, feature = "test-support"))]
465 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
466 let languages = Arc::new(LanguageRegistry::test());
467 let http_client = client::test::FakeHttpClient::with_404_response();
468 let client = client::Client::new(http_client.clone());
469 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
470 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
471 }
472
473 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
474 self.opened_buffers
475 .get(&remote_id)
476 .and_then(|buffer| buffer.upgrade(cx))
477 }
478
479 #[cfg(any(test, feature = "test-support"))]
480 pub fn languages(&self) -> &Arc<LanguageRegistry> {
481 &self.languages
482 }
483
484 #[cfg(any(test, feature = "test-support"))]
485 pub fn check_invariants(&self, cx: &AppContext) {
486 if self.is_local() {
487 let mut worktree_root_paths = HashMap::default();
488 for worktree in self.worktrees(cx) {
489 let worktree = worktree.read(cx);
490 let abs_path = worktree.as_local().unwrap().abs_path().clone();
491 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
492 assert_eq!(
493 prev_worktree_id,
494 None,
495 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
496 abs_path,
497 worktree.id(),
498 prev_worktree_id
499 )
500 }
501 } else {
502 let replica_id = self.replica_id();
503 for buffer in self.opened_buffers.values() {
504 if let Some(buffer) = buffer.upgrade(cx) {
505 let buffer = buffer.read(cx);
506 assert_eq!(
507 buffer.deferred_ops_len(),
508 0,
509 "replica {}, buffer {} has deferred operations",
510 replica_id,
511 buffer.remote_id()
512 );
513 }
514 }
515 }
516 }
517
518 #[cfg(any(test, feature = "test-support"))]
519 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
520 let path = path.into();
521 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
522 self.opened_buffers.iter().any(|(_, buffer)| {
523 if let Some(buffer) = buffer.upgrade(cx) {
524 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
525 if file.worktree == worktree && file.path() == &path.path {
526 return true;
527 }
528 }
529 }
530 false
531 })
532 } else {
533 false
534 }
535 }
536
537 pub fn fs(&self) -> &Arc<dyn Fs> {
538 &self.fs
539 }
540
541 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
542 self.unshare(cx);
543 for worktree in &self.worktrees {
544 if let Some(worktree) = worktree.upgrade(cx) {
545 worktree.update(cx, |worktree, _| {
546 worktree.as_local_mut().unwrap().unregister();
547 });
548 }
549 }
550
551 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
552 *remote_id_tx.borrow_mut() = None;
553 }
554
555 self.subscriptions.clear();
556 }
557
558 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
559 self.unregister(cx);
560
561 let response = self.client.request(proto::RegisterProject {});
562 cx.spawn(|this, mut cx| async move {
563 let remote_id = response.await?.project_id;
564
565 let mut registrations = Vec::new();
566 this.update(&mut cx, |this, cx| {
567 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
568 *remote_id_tx.borrow_mut() = Some(remote_id);
569 }
570
571 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
572
573 this.subscriptions
574 .push(this.client.add_model_for_remote_entity(remote_id, cx));
575
576 for worktree in &this.worktrees {
577 if let Some(worktree) = worktree.upgrade(cx) {
578 registrations.push(worktree.update(cx, |worktree, cx| {
579 let worktree = worktree.as_local_mut().unwrap();
580 worktree.register(remote_id, cx)
581 }));
582 }
583 }
584 });
585
586 futures::future::try_join_all(registrations).await?;
587 Ok(())
588 })
589 }
590
591 pub fn remote_id(&self) -> Option<u64> {
592 match &self.client_state {
593 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
594 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
595 }
596 }
597
598 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
599 let mut id = None;
600 let mut watch = None;
601 match &self.client_state {
602 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
603 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
604 }
605
606 async move {
607 if let Some(id) = id {
608 return id;
609 }
610 let mut watch = watch.unwrap();
611 loop {
612 let id = *watch.borrow();
613 if let Some(id) = id {
614 return id;
615 }
616 watch.next().await;
617 }
618 }
619 }
620
621 pub fn replica_id(&self) -> ReplicaId {
622 match &self.client_state {
623 ProjectClientState::Local { .. } => 0,
624 ProjectClientState::Remote { replica_id, .. } => *replica_id,
625 }
626 }
627
628 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
629 &self.collaborators
630 }
631
632 pub fn worktrees<'a>(
633 &'a self,
634 cx: &'a AppContext,
635 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
636 self.worktrees
637 .iter()
638 .filter_map(move |worktree| worktree.upgrade(cx))
639 }
640
641 pub fn visible_worktrees<'a>(
642 &'a self,
643 cx: &'a AppContext,
644 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
645 self.worktrees.iter().filter_map(|worktree| {
646 worktree.upgrade(cx).and_then(|worktree| {
647 if worktree.read(cx).is_visible() {
648 Some(worktree)
649 } else {
650 None
651 }
652 })
653 })
654 }
655
656 pub fn worktree_for_id(
657 &self,
658 id: WorktreeId,
659 cx: &AppContext,
660 ) -> Option<ModelHandle<Worktree>> {
661 self.worktrees(cx)
662 .find(|worktree| worktree.read(cx).id() == id)
663 }
664
665 pub fn worktree_for_entry(
666 &self,
667 entry_id: ProjectEntryId,
668 cx: &AppContext,
669 ) -> Option<ModelHandle<Worktree>> {
670 self.worktrees(cx)
671 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
672 }
673
674 pub fn worktree_id_for_entry(
675 &self,
676 entry_id: ProjectEntryId,
677 cx: &AppContext,
678 ) -> Option<WorktreeId> {
679 self.worktree_for_entry(entry_id, cx)
680 .map(|worktree| worktree.read(cx).id())
681 }
682
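    /// Shares this local project with collaborators: upgrades weak buffer and
    /// worktree handles so they stay alive for guests, registers the share
    /// with the server, and then shares each worktree.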
683 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
684 let rpc = self.client.clone();
685 cx.spawn(|this, mut cx| async move {
686 let project_id = this.update(&mut cx, |this, cx| {
687 if let ProjectClientState::Local {
688 is_shared,
689 remote_id_rx,
690 ..
691 } = &mut this.client_state
692 {
693 *is_shared = true;
694
695 for open_buffer in this.opened_buffers.values_mut() {
696 match open_buffer {
697 OpenBuffer::Strong(_) => {}
698 OpenBuffer::Weak(buffer) => {
699 if let Some(buffer) = buffer.upgrade(cx) {
700 *open_buffer = OpenBuffer::Strong(buffer);
701 }
702 }
703 OpenBuffer::Loading(_) => unreachable!(),
704 }
705 }
706
707 for worktree_handle in this.worktrees.iter_mut() {
708 match worktree_handle {
709 WorktreeHandle::Strong(_) => {}
710 WorktreeHandle::Weak(worktree) => {
711 if let Some(worktree) = worktree.upgrade(cx) {
712 *worktree_handle = WorktreeHandle::Strong(worktree);
713 }
714 }
715 }
716 }
717
718 remote_id_rx
719 .borrow()
720 .ok_or_else(|| anyhow!("no project id"))
721 } else {
722 Err(anyhow!("can't share a remote project"))
723 }
724 })?;
725
726 rpc.request(proto::ShareProject { project_id }).await?;
727
728 let mut tasks = Vec::new();
729 this.update(&mut cx, |this, cx| {
730 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
731 worktree.update(cx, |worktree, cx| {
732 let worktree = worktree.as_local_mut().unwrap();
733 tasks.push(worktree.share(project_id, cx));
734 });
735 }
736 });
737 for task in tasks {
738 task.await?;
739 }
740 this.update(&mut cx, |_, cx| cx.notify());
741 Ok(())
742 })
743 }
744
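    /// Stops sharing this local project: clears collaborator state, downgrades
    /// buffer handles and invisible worktree handles back to weak references,
    /// and notifies the server.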
745 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
746 let rpc = self.client.clone();
747
748 if let ProjectClientState::Local {
749 is_shared,
750 remote_id_rx,
751 ..
752 } = &mut self.client_state
753 {
754 if !*is_shared {
755 return;
756 }
757
758 *is_shared = false;
759 self.collaborators.clear();
760 self.shared_buffers.clear();
761 for worktree_handle in self.worktrees.iter_mut() {
762 if let WorktreeHandle::Strong(worktree) = worktree_handle {
763 let is_visible = worktree.update(cx, |worktree, _| {
764 worktree.as_local_mut().unwrap().unshare();
765 worktree.is_visible()
766 });
767 if !is_visible {
768 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
769 }
770 }
771 }
772
773 for open_buffer in self.opened_buffers.values_mut() {
774 match open_buffer {
775 OpenBuffer::Strong(buffer) => {
776 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
777 }
778 _ => {}
779 }
780 }
781
782 if let Some(project_id) = *remote_id_rx.borrow() {
783 rpc.send(proto::UnshareProject { project_id }).log_err();
784 }
785
786 cx.notify();
787 } else {
788 log::error!("attempted to unshare a remote project");
789 }
790 }
791
792 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
793 if let ProjectClientState::Remote {
794 sharing_has_stopped,
795 ..
796 } = &mut self.client_state
797 {
798 *sharing_has_stopped = true;
799 self.collaborators.clear();
800 cx.notify();
801 }
802 }
803
804 pub fn is_read_only(&self) -> bool {
805 match &self.client_state {
806 ProjectClientState::Local { .. } => false,
807 ProjectClientState::Remote {
808 sharing_has_stopped,
809 ..
810 } => *sharing_has_stopped,
811 }
812 }
813
814 pub fn is_local(&self) -> bool {
815 match &self.client_state {
816 ProjectClientState::Local { .. } => true,
817 ProjectClientState::Remote { .. } => false,
818 }
819 }
820
821 pub fn is_remote(&self) -> bool {
822 !self.is_local()
823 }
824
825 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
826 if self.is_remote() {
827 return Err(anyhow!("creating buffers as a guest is not supported yet"));
828 }
829
830 let buffer = cx.add_model(|cx| {
831 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
832 });
833 self.register_buffer(&buffer, cx)?;
834 Ok(buffer)
835 }
836
837 pub fn open_path(
838 &mut self,
839 path: impl Into<ProjectPath>,
840 cx: &mut ModelContext<Self>,
841 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
842 let task = self.open_buffer(path, cx);
843 cx.spawn_weak(|_, cx| async move {
844 let buffer = task.await?;
845 let project_entry_id = buffer
846 .read_with(&cx, |buffer, cx| {
847 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
848 })
849 .ok_or_else(|| anyhow!("no project entry"))?;
850 Ok((project_entry_id, buffer.into()))
851 })
852 }
853
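    /// Opens the buffer at the given project path, reusing a buffer that is
    /// already open and deduplicating concurrent loads of the same path.
    ///
    /// A minimal usage sketch (hypothetical names: it assumes a `project`
    /// handle, a `worktree_id`, and a `cx: &mut MutableAppContext` are already
    /// in scope, and the path is illustrative):
    ///
    /// ```ignore
    /// let open = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/main.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// // `open` is a Task resolving to a Result<ModelHandle<Buffer>>.
    /// ```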
854 pub fn open_buffer(
855 &mut self,
856 path: impl Into<ProjectPath>,
857 cx: &mut ModelContext<Self>,
858 ) -> Task<Result<ModelHandle<Buffer>>> {
859 let project_path = path.into();
860 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
861 worktree
862 } else {
863 return Task::ready(Err(anyhow!("no such worktree")));
864 };
865
866 // If there is already a buffer for the given path, then return it.
867 let existing_buffer = self.get_open_buffer(&project_path, cx);
868 if let Some(existing_buffer) = existing_buffer {
869 return Task::ready(Ok(existing_buffer));
870 }
871
872 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
873 // If the given path is already being loaded, then wait for that existing
874 // task to complete and return the same buffer.
875 hash_map::Entry::Occupied(e) => e.get().clone(),
876
877 // Otherwise, record the fact that this path is now being loaded.
878 hash_map::Entry::Vacant(entry) => {
879 let (mut tx, rx) = postage::watch::channel();
880 entry.insert(rx.clone());
881
882 let load_buffer = if worktree.read(cx).is_local() {
883 self.open_local_buffer(&project_path.path, &worktree, cx)
884 } else {
885 self.open_remote_buffer(&project_path.path, &worktree, cx)
886 };
887
888 cx.spawn(move |this, mut cx| async move {
889 let load_result = load_buffer.await;
890 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
891 // Record the fact that the buffer is no longer loading.
892 this.loading_buffers.remove(&project_path);
893 let buffer = load_result.map_err(Arc::new)?;
894 Ok(buffer)
895 }));
896 })
897 .detach();
898 rx
899 }
900 };
901
902 cx.foreground().spawn(async move {
903 loop {
904 if let Some(result) = loading_watch.borrow().as_ref() {
905 match result {
906 Ok(buffer) => return Ok(buffer.clone()),
907 Err(error) => return Err(anyhow!("{}", error)),
908 }
909 }
910 loading_watch.next().await;
911 }
912 })
913 }
914
915 fn open_local_buffer(
916 &mut self,
917 path: &Arc<Path>,
918 worktree: &ModelHandle<Worktree>,
919 cx: &mut ModelContext<Self>,
920 ) -> Task<Result<ModelHandle<Buffer>>> {
921 let load_buffer = worktree.update(cx, |worktree, cx| {
922 let worktree = worktree.as_local_mut().unwrap();
923 worktree.load_buffer(path, cx)
924 });
925 cx.spawn(|this, mut cx| async move {
926 let buffer = load_buffer.await?;
927 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
928 Ok(buffer)
929 })
930 }
931
932 fn open_remote_buffer(
933 &mut self,
934 path: &Arc<Path>,
935 worktree: &ModelHandle<Worktree>,
936 cx: &mut ModelContext<Self>,
937 ) -> Task<Result<ModelHandle<Buffer>>> {
938 let rpc = self.client.clone();
939 let project_id = self.remote_id().unwrap();
940 let remote_worktree_id = worktree.read(cx).id();
941 let path = path.clone();
942 let path_string = path.to_string_lossy().to_string();
943 cx.spawn(|this, mut cx| async move {
944 let response = rpc
945 .request(proto::OpenBufferByPath {
946 project_id,
947 worktree_id: remote_worktree_id.to_proto(),
948 path: path_string,
949 })
950 .await?;
951 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
952 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
953 .await
954 })
955 }
956
957 fn open_local_buffer_via_lsp(
958 &mut self,
959 abs_path: lsp::Url,
960 lang_name: Arc<str>,
961 lang_server: Arc<LanguageServer>,
962 cx: &mut ModelContext<Self>,
963 ) -> Task<Result<ModelHandle<Buffer>>> {
964 cx.spawn(|this, mut cx| async move {
965 let abs_path = abs_path
966 .to_file_path()
967 .map_err(|_| anyhow!("can't convert URI to path"))?;
968 let (worktree, relative_path) = if let Some(result) =
969 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
970 {
971 result
972 } else {
973 let worktree = this
974 .update(&mut cx, |this, cx| {
975 this.create_local_worktree(&abs_path, false, cx)
976 })
977 .await?;
978 this.update(&mut cx, |this, cx| {
979 this.language_servers
980 .insert((worktree.read(cx).id(), lang_name), lang_server);
981 });
982 (worktree, PathBuf::new())
983 };
984
985 let project_path = ProjectPath {
986 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
987 path: relative_path.into(),
988 };
989 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
990 .await
991 })
992 }
993
994 pub fn open_buffer_by_id(
995 &mut self,
996 id: u64,
997 cx: &mut ModelContext<Self>,
998 ) -> Task<Result<ModelHandle<Buffer>>> {
999 if let Some(buffer) = self.buffer_for_id(id, cx) {
1000 Task::ready(Ok(buffer))
1001 } else if self.is_local() {
1002 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1003 } else if let Some(project_id) = self.remote_id() {
1004 let request = self
1005 .client
1006 .request(proto::OpenBufferById { project_id, id });
1007 cx.spawn(|this, mut cx| async move {
1008 let buffer = request
1009 .await?
1010 .buffer
1011 .ok_or_else(|| anyhow!("invalid buffer"))?;
1012 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1013 .await
1014 })
1015 } else {
1016 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1017 }
1018 }
1019
1020 pub fn save_buffer_as(
1021 &mut self,
1022 buffer: ModelHandle<Buffer>,
1023 abs_path: PathBuf,
1024 cx: &mut ModelContext<Project>,
1025 ) -> Task<Result<()>> {
1026 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1027 cx.spawn(|this, mut cx| async move {
1028 let (worktree, path) = worktree_task.await?;
1029 worktree
1030 .update(&mut cx, |worktree, cx| {
1031 worktree
1032 .as_local_mut()
1033 .unwrap()
1034 .save_buffer_as(buffer.clone(), path, cx)
1035 })
1036 .await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.assign_language_to_buffer(&buffer, cx);
1039 this.register_buffer_with_language_server(&buffer, cx);
1040 });
1041 Ok(())
1042 })
1043 }
1044
1045 pub fn get_open_buffer(
1046 &mut self,
1047 path: &ProjectPath,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Option<ModelHandle<Buffer>> {
1050 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1051 self.opened_buffers.values().find_map(|buffer| {
1052 let buffer = buffer.upgrade(cx)?;
1053 let file = File::from_dyn(buffer.read(cx).file())?;
1054 if file.worktree == worktree && file.path() == &path.path {
1055 Some(buffer)
1056 } else {
1057 None
1058 }
1059 })
1060 }
1061
1062 fn register_buffer(
1063 &mut self,
1064 buffer: &ModelHandle<Buffer>,
1065 cx: &mut ModelContext<Self>,
1066 ) -> Result<()> {
1067 let remote_id = buffer.read(cx).remote_id();
1068 let open_buffer = if self.is_remote() || self.is_shared() {
1069 OpenBuffer::Strong(buffer.clone())
1070 } else {
1071 OpenBuffer::Weak(buffer.downgrade())
1072 };
1073
1074 match self.opened_buffers.insert(remote_id, open_buffer) {
1075 None => {}
1076 Some(OpenBuffer::Loading(operations)) => {
1077 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1078 }
1079 Some(OpenBuffer::Weak(existing_handle)) => {
1080 if existing_handle.upgrade(cx).is_some() {
1081 Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?
1085 }
1086 }
1087 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?,
1091 }
1092 cx.subscribe(buffer, |this, buffer, event, cx| {
1093 this.on_buffer_event(buffer, event, cx);
1094 })
1095 .detach();
1096
1097 self.assign_language_to_buffer(buffer, cx);
1098 self.register_buffer_with_language_server(buffer, cx);
1099
1100 Ok(())
1101 }
1102
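    /// Introduces a newly registered local buffer to its language server:
    /// sends a `textDocument/didOpen` notification, records the server's
    /// completion trigger characters on the buffer, and arranges for
    /// `textDocument/didClose` to be sent when the buffer is released.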
1103 fn register_buffer_with_language_server(
1104 &mut self,
1105 buffer_handle: &ModelHandle<Buffer>,
1106 cx: &mut ModelContext<Self>,
1107 ) {
1108 let buffer = buffer_handle.read(cx);
1109 let buffer_id = buffer.remote_id();
1110 if let Some(file) = File::from_dyn(buffer.file()) {
1111 if file.is_local() {
1112 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1113 let initial_snapshot = buffer.text_snapshot();
1114 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1115
1116 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1117 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1118 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1119 .log_err();
1120 }
1121 }
1122
1123 if let Some(server) = language_server {
1124 server
1125 .notify::<lsp::notification::DidOpenTextDocument>(
1126 lsp::DidOpenTextDocumentParams {
1127 text_document: lsp::TextDocumentItem::new(
1128 uri,
1129 Default::default(),
1130 0,
1131 initial_snapshot.text(),
1132 ),
1133 }
1134 .clone(),
1135 )
1136 .log_err();
1137 buffer_handle.update(cx, |buffer, cx| {
1138 buffer.set_completion_triggers(
1139 server
1140 .capabilities()
1141 .completion_provider
1142 .as_ref()
1143 .and_then(|provider| provider.trigger_characters.clone())
1144 .unwrap_or(Vec::new()),
1145 cx,
1146 )
1147 });
1148 self.buffer_snapshots
1149 .insert(buffer_id, vec![(0, initial_snapshot)]);
1150 }
1151
1152 cx.observe_release(buffer_handle, |this, buffer, cx| {
1153 if let Some(file) = File::from_dyn(buffer.file()) {
1154 if file.is_local() {
1155 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1156 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1157 server
1158 .notify::<lsp::notification::DidCloseTextDocument>(
1159 lsp::DidCloseTextDocumentParams {
1160 text_document: lsp::TextDocumentIdentifier::new(
1161 uri.clone(),
1162 ),
1163 },
1164 )
1165 .log_err();
1166 }
1167 }
1168 }
1169 })
1170 .detach();
1171 }
1172 }
1173 }
1174
1175 fn on_buffer_event(
1176 &mut self,
1177 buffer: ModelHandle<Buffer>,
1178 event: &BufferEvent,
1179 cx: &mut ModelContext<Self>,
1180 ) -> Option<()> {
1181 match event {
1182 BufferEvent::Operation(operation) => {
1183 let project_id = self.remote_id()?;
1184 let request = self.client.request(proto::UpdateBuffer {
1185 project_id,
1186 buffer_id: buffer.read(cx).remote_id(),
1187 operations: vec![language::proto::serialize_operation(&operation)],
1188 });
1189 cx.background().spawn(request).detach_and_log_err(cx);
1190 }
1191 BufferEvent::Edited { .. } => {
1192 let language_server = self
1193 .language_server_for_buffer(buffer.read(cx), cx)?
1194 .clone();
1195 let buffer = buffer.read(cx);
1196 let file = File::from_dyn(buffer.file())?;
1197 let abs_path = file.as_local()?.abs_path(cx);
1198 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1199 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1200 let (version, prev_snapshot) = buffer_snapshots.last()?;
1201 let next_snapshot = buffer.text_snapshot();
1202 let next_version = version + 1;
1203
1204 let content_changes = buffer
1205 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1206 .map(|edit| {
1207 let edit_start = edit.new.start.0;
1208 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1209 let new_text = next_snapshot
1210 .text_for_range(edit.new.start.1..edit.new.end.1)
1211 .collect();
1212 lsp::TextDocumentContentChangeEvent {
1213 range: Some(lsp::Range::new(
1214 edit_start.to_lsp_position(),
1215 edit_end.to_lsp_position(),
1216 )),
1217 range_length: None,
1218 text: new_text,
1219 }
1220 })
1221 .collect();
1222
1223 buffer_snapshots.push((next_version, next_snapshot));
1224
1225 language_server
1226 .notify::<lsp::notification::DidChangeTextDocument>(
1227 lsp::DidChangeTextDocumentParams {
1228 text_document: lsp::VersionedTextDocumentIdentifier::new(
1229 uri,
1230 next_version,
1231 ),
1232 content_changes,
1233 },
1234 )
1235 .log_err();
1236 }
1237 BufferEvent::Saved => {
1238 let file = File::from_dyn(buffer.read(cx).file())?;
1239 let worktree_id = file.worktree_id(cx);
1240 let abs_path = file.as_local()?.abs_path(cx);
1241 let text_document = lsp::TextDocumentIdentifier {
1242 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1243 };
1244
1245 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1246 server
1247 .notify::<lsp::notification::DidSaveTextDocument>(
1248 lsp::DidSaveTextDocumentParams {
1249 text_document: text_document.clone(),
1250 text: None,
1251 },
1252 )
1253 .log_err();
1254 }
1255 }
1256 _ => {}
1257 }
1258
1259 None
1260 }
1261
1262 fn language_servers_for_worktree(
1263 &self,
1264 worktree_id: WorktreeId,
1265 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1266 self.language_servers.iter().filter_map(
1267 move |((language_server_worktree_id, language_name), server)| {
1268 if *language_server_worktree_id == worktree_id {
1269 Some((language_name.as_ref(), server))
1270 } else {
1271 None
1272 }
1273 },
1274 )
1275 }
1276
1277 fn assign_language_to_buffer(
1278 &mut self,
1279 buffer: &ModelHandle<Buffer>,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Option<()> {
        // If a language can be detected for this buffer's path, assign it and start
        // the corresponding language server if one isn't already running.
1283 let full_path = buffer.read(cx).file()?.full_path(cx);
1284 let language = self.languages.select_language(&full_path)?;
1285 buffer.update(cx, |buffer, cx| {
1286 buffer.set_language(Some(language.clone()), cx);
1287 });
1288
1289 let file = File::from_dyn(buffer.read(cx).file())?;
1290 let worktree = file.worktree.read(cx).as_local()?;
1291 let worktree_id = worktree.id();
1292 let worktree_abs_path = worktree.abs_path().clone();
1293 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1294
1295 None
1296 }
1297
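    /// Starts the language server for the given worktree and language if it
    /// hasn't been started already, wiring its progress and diagnostics
    /// notifications into `LanguageServerEvent`s and replaying the already-open
    /// buffers to it once initialization completes.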
1298 fn start_language_server(
1299 &mut self,
1300 worktree_id: WorktreeId,
1301 worktree_path: Arc<Path>,
1302 language: Arc<Language>,
1303 cx: &mut ModelContext<Self>,
1304 ) {
1305 let key = (worktree_id, language.name());
1306 self.started_language_servers
1307 .entry(key.clone())
1308 .or_insert_with(|| {
1309 let server_id = post_inc(&mut self.next_language_server_id);
1310 let language_server = self.languages.start_language_server(
1311 language.clone(),
1312 worktree_path,
1313 self.client.http_client(),
1314 cx,
1315 );
1316 cx.spawn_weak(|this, mut cx| async move {
1317 let mut language_server = language_server?.await.log_err()?;
1318 let this = this.upgrade(&cx)?;
1319 let (language_server_events_tx, language_server_events_rx) =
1320 smol::channel::unbounded();
1321
1322 language_server
1323 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1324 let language_server_events_tx = language_server_events_tx.clone();
1325 move |params| {
1326 language_server_events_tx
1327 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1328 .ok();
1329 }
1330 })
1331 .detach();
1332
1333 language_server
1334 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1335 let settings = this
1336 .read_with(&cx, |this, _| this.language_server_settings.clone());
1337 move |params| {
1338 let settings = settings.lock();
1339 Ok(params
1340 .items
1341 .into_iter()
1342 .map(|item| {
1343 if let Some(section) = &item.section {
1344 settings
1345 .get(section)
1346 .cloned()
1347 .unwrap_or(serde_json::Value::Null)
1348 } else {
1349 settings.clone()
1350 }
1351 })
1352 .collect())
1353 }
1354 })
1355 .detach();
1356
1357 language_server
1358 .on_notification::<lsp::notification::Progress, _>(move |params| {
1359 let token = match params.token {
1360 lsp::NumberOrString::String(token) => token,
1361 lsp::NumberOrString::Number(token) => {
1362 log::info!("skipping numeric progress token {}", token);
1363 return;
1364 }
1365 };
1366
1367 match params.value {
1368 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1369 lsp::WorkDoneProgress::Begin(_) => {
1370 language_server_events_tx
1371 .try_send(LanguageServerEvent::WorkStart { token })
1372 .ok();
1373 }
1374 lsp::WorkDoneProgress::Report(report) => {
1375 language_server_events_tx
1376 .try_send(LanguageServerEvent::WorkProgress {
1377 token,
1378 progress: LanguageServerProgress {
1379 message: report.message,
1380 percentage: report
1381 .percentage
1382 .map(|p| p as usize),
1383 last_update_at: Instant::now(),
1384 },
1385 })
1386 .ok();
1387 }
1388 lsp::WorkDoneProgress::End(_) => {
1389 language_server_events_tx
1390 .try_send(LanguageServerEvent::WorkEnd { token })
1391 .ok();
1392 }
1393 },
1394 }
1395 })
1396 .detach();
1397
1398 // Process all the LSP events.
1399 cx.spawn(|mut cx| {
1400 let this = this.downgrade();
1401 async move {
1402 while let Ok(event) = language_server_events_rx.recv().await {
1403 let this = this.upgrade(&cx)?;
1404 this.update(&mut cx, |this, cx| {
1405 this.on_lsp_event(server_id, event, &language, cx)
1406 });
1407
1408 // Don't starve the main thread when lots of events arrive all at once.
1409 smol::future::yield_now().await;
1410 }
1411 Some(())
1412 }
1413 })
1414 .detach();
1415
1416 let language_server = language_server.initialize().await.log_err()?;
1417 this.update(&mut cx, |this, cx| {
1418 this.language_servers
1419 .insert(key.clone(), language_server.clone());
1420 this.language_server_statuses.insert(
1421 server_id,
1422 LanguageServerStatus {
1423 name: language_server.name().to_string(),
1424 pending_work: Default::default(),
1425 pending_diagnostic_updates: 0,
1426 },
1427 );
1428 language_server
1429 .notify::<lsp::notification::DidChangeConfiguration>(
1430 lsp::DidChangeConfigurationParams {
1431 settings: this.language_server_settings.lock().clone(),
1432 },
1433 )
1434 .ok();
1435
1436 if let Some(project_id) = this.remote_id() {
1437 this.client
1438 .send(proto::StartLanguageServer {
1439 project_id,
1440 server: Some(proto::LanguageServer {
1441 id: server_id as u64,
1442 name: language_server.name().to_string(),
1443 }),
1444 })
1445 .log_err();
1446 }
1447
1448 // Tell the language server about every open buffer in the worktree that matches the language.
1449 for buffer in this.opened_buffers.values() {
1450 if let Some(buffer_handle) = buffer.upgrade(cx) {
1451 let buffer = buffer_handle.read(cx);
1452 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1453 file
1454 } else {
1455 continue;
1456 };
1457 let language = if let Some(language) = buffer.language() {
1458 language
1459 } else {
1460 continue;
1461 };
1462 if (file.worktree.read(cx).id(), language.name()) != key {
1463 continue;
1464 }
1465
1466 let file = file.as_local()?;
1467 let versions = this
1468 .buffer_snapshots
1469 .entry(buffer.remote_id())
1470 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1471 let (version, initial_snapshot) = versions.last().unwrap();
1472 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1473 language_server
1474 .notify::<lsp::notification::DidOpenTextDocument>(
1475 lsp::DidOpenTextDocumentParams {
1476 text_document: lsp::TextDocumentItem::new(
1477 uri,
1478 Default::default(),
1479 *version,
1480 initial_snapshot.text(),
1481 ),
1482 },
1483 )
1484 .log_err()?;
1485 buffer_handle.update(cx, |buffer, cx| {
1486 buffer.set_completion_triggers(
1487 language_server
1488 .capabilities()
1489 .completion_provider
1490 .as_ref()
1491 .and_then(|provider| {
1492 provider.trigger_characters.clone()
1493 })
1494 .unwrap_or(Vec::new()),
1495 cx,
1496 )
1497 });
1498 }
1499 }
1500
1501 cx.notify();
1502 Some(())
1503 });
1504
1505 Some(language_server)
1506 })
1507 });
1508 }
1509
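    /// Handles an event received from a language server: tracks disk-based
    /// diagnostic progress, updates the server's status, and broadcasts the
    /// change to collaborators when the project is shared.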
1510 fn on_lsp_event(
1511 &mut self,
1512 language_server_id: usize,
1513 event: LanguageServerEvent,
1514 language: &Arc<Language>,
1515 cx: &mut ModelContext<Self>,
1516 ) {
1517 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1518 let language_server_status =
1519 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1520 status
1521 } else {
1522 return;
1523 };
1524
1525 match event {
1526 LanguageServerEvent::WorkStart { token } => {
1527 if Some(&token) == disk_diagnostics_token {
1528 language_server_status.pending_diagnostic_updates += 1;
1529 if language_server_status.pending_diagnostic_updates == 1 {
1530 self.disk_based_diagnostics_started(cx);
1531 self.broadcast_language_server_update(
1532 language_server_id,
1533 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1534 proto::LspDiskBasedDiagnosticsUpdating {},
1535 ),
1536 );
1537 }
1538 } else {
1539 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1540 self.broadcast_language_server_update(
1541 language_server_id,
1542 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1543 token,
1544 }),
1545 );
1546 }
1547 }
1548 LanguageServerEvent::WorkProgress { token, progress } => {
1549 if Some(&token) != disk_diagnostics_token {
1550 self.on_lsp_work_progress(
1551 language_server_id,
1552 token.clone(),
1553 progress.clone(),
1554 cx,
1555 );
1556 self.broadcast_language_server_update(
1557 language_server_id,
1558 proto::update_language_server::Variant::WorkProgress(
1559 proto::LspWorkProgress {
1560 token,
1561 message: progress.message,
1562 percentage: progress.percentage.map(|p| p as u32),
1563 },
1564 ),
1565 );
1566 }
1567 }
1568 LanguageServerEvent::WorkEnd { token } => {
1569 if Some(&token) == disk_diagnostics_token {
1570 language_server_status.pending_diagnostic_updates -= 1;
1571 if language_server_status.pending_diagnostic_updates == 0 {
1572 self.disk_based_diagnostics_finished(cx);
1573 self.broadcast_language_server_update(
1574 language_server_id,
1575 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1576 proto::LspDiskBasedDiagnosticsUpdated {},
1577 ),
1578 );
1579 }
1580 } else {
1581 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1582 self.broadcast_language_server_update(
1583 language_server_id,
1584 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1585 token,
1586 }),
1587 );
1588 }
1589 }
1590 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1591 language.process_diagnostics(&mut params);
1592
1593 if disk_diagnostics_token.is_none() {
1594 self.disk_based_diagnostics_started(cx);
1595 self.broadcast_language_server_update(
1596 language_server_id,
1597 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1598 proto::LspDiskBasedDiagnosticsUpdating {},
1599 ),
1600 );
1601 }
1602 self.update_diagnostics(params, language.disk_based_diagnostic_sources(), cx)
1603 .log_err();
1604 if disk_diagnostics_token.is_none() {
1605 self.disk_based_diagnostics_finished(cx);
1606 self.broadcast_language_server_update(
1607 language_server_id,
1608 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1609 proto::LspDiskBasedDiagnosticsUpdated {},
1610 ),
1611 );
1612 }
1613 }
1614 }
1615 }
1616
1617 fn on_lsp_work_start(
1618 &mut self,
1619 language_server_id: usize,
1620 token: String,
1621 cx: &mut ModelContext<Self>,
1622 ) {
1623 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1624 status.pending_work.insert(
1625 token,
1626 LanguageServerProgress {
1627 message: None,
1628 percentage: None,
1629 last_update_at: Instant::now(),
1630 },
1631 );
1632 cx.notify();
1633 }
1634 }
1635
1636 fn on_lsp_work_progress(
1637 &mut self,
1638 language_server_id: usize,
1639 token: String,
1640 progress: LanguageServerProgress,
1641 cx: &mut ModelContext<Self>,
1642 ) {
1643 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1644 status.pending_work.insert(token, progress);
1645 cx.notify();
1646 }
1647 }
1648
1649 fn on_lsp_work_end(
1650 &mut self,
1651 language_server_id: usize,
1652 token: String,
1653 cx: &mut ModelContext<Self>,
1654 ) {
1655 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1656 status.pending_work.remove(&token);
1657 cx.notify();
1658 }
1659 }
1660
1661 fn broadcast_language_server_update(
1662 &self,
1663 language_server_id: usize,
1664 event: proto::update_language_server::Variant,
1665 ) {
1666 if let Some(project_id) = self.remote_id() {
1667 self.client
1668 .send(proto::UpdateLanguageServer {
1669 project_id,
1670 language_server_id: language_server_id as u64,
1671 variant: Some(event),
1672 })
1673 .log_err();
1674 }
1675 }
1676
1677 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1678 for server in self.language_servers.values() {
1679 server
1680 .notify::<lsp::notification::DidChangeConfiguration>(
1681 lsp::DidChangeConfigurationParams {
1682 settings: settings.clone(),
1683 },
1684 )
1685 .ok();
1686 }
1687 *self.language_server_settings.lock() = settings;
1688 }
1689
1690 pub fn language_server_statuses(
1691 &self,
1692 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1693 self.language_server_statuses.values()
1694 }
1695
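    /// Converts a `textDocument/publishDiagnostics` notification into
    /// `DiagnosticEntry` values, grouping each primary diagnostic with its
    /// related information and marking which entries come from disk-based
    /// sources, then stores them on the affected worktree and any open buffer
    /// for that path.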
1696 pub fn update_diagnostics(
1697 &mut self,
1698 params: lsp::PublishDiagnosticsParams,
1699 disk_based_sources: &HashSet<String>,
1700 cx: &mut ModelContext<Self>,
1701 ) -> Result<()> {
1702 let abs_path = params
1703 .uri
1704 .to_file_path()
1705 .map_err(|_| anyhow!("URI is not a file"))?;
1706 let mut next_group_id = 0;
1707 let mut diagnostics = Vec::default();
1708 let mut primary_diagnostic_group_ids = HashMap::default();
1709 let mut sources_by_group_id = HashMap::default();
1710 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1712 let source = diagnostic.source.as_ref();
1713 let code = diagnostic.code.as_ref().map(|code| match code {
1714 lsp::NumberOrString::Number(code) => code.to_string(),
1715 lsp::NumberOrString::String(code) => code.clone(),
1716 });
1717 let range = range_from_lsp(diagnostic.range);
1718 let is_supporting = diagnostic
1719 .related_information
1720 .as_ref()
1721 .map_or(false, |infos| {
1722 infos.iter().any(|info| {
1723 primary_diagnostic_group_ids.contains_key(&(
1724 source,
1725 code.clone(),
1726 range_from_lsp(info.location.range),
1727 ))
1728 })
1729 });
1730
1731 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1732 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1733 });
1734
1735 if is_supporting {
1736 supporting_diagnostics.insert(
1737 (source, code.clone(), range),
1738 (diagnostic.severity, is_unnecessary),
1739 );
1740 } else {
1741 let group_id = post_inc(&mut next_group_id);
1742 let is_disk_based =
1743 source.map_or(false, |source| disk_based_sources.contains(source));
1744
1745 sources_by_group_id.insert(group_id, source);
1746 primary_diagnostic_group_ids
1747 .insert((source, code.clone(), range.clone()), group_id);
1748
1749 diagnostics.push(DiagnosticEntry {
1750 range,
1751 diagnostic: Diagnostic {
1752 code: code.clone(),
1753 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1754 message: diagnostic.message.clone(),
1755 group_id,
1756 is_primary: true,
1757 is_valid: true,
1758 is_disk_based,
1759 is_unnecessary,
1760 },
1761 });
1762 if let Some(infos) = &diagnostic.related_information {
1763 for info in infos {
1764 if info.location.uri == params.uri && !info.message.is_empty() {
1765 let range = range_from_lsp(info.location.range);
1766 diagnostics.push(DiagnosticEntry {
1767 range,
1768 diagnostic: Diagnostic {
1769 code: code.clone(),
1770 severity: DiagnosticSeverity::INFORMATION,
1771 message: info.message.clone(),
1772 group_id,
1773 is_primary: false,
1774 is_valid: true,
1775 is_disk_based,
1776 is_unnecessary: false,
1777 },
1778 });
1779 }
1780 }
1781 }
1782 }
1783 }
1784
1785 for entry in &mut diagnostics {
1786 let diagnostic = &mut entry.diagnostic;
1787 if !diagnostic.is_primary {
1788 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1789 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1790 source,
1791 diagnostic.code.clone(),
1792 entry.range.clone(),
1793 )) {
1794 if let Some(severity) = severity {
1795 diagnostic.severity = severity;
1796 }
1797 diagnostic.is_unnecessary = is_unnecessary;
1798 }
1799 }
1800 }
1801
1802 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1803 Ok(())
1804 }
1805
1806 pub fn update_diagnostic_entries(
1807 &mut self,
1808 abs_path: PathBuf,
1809 version: Option<i32>,
1810 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1811 cx: &mut ModelContext<Project>,
1812 ) -> Result<(), anyhow::Error> {
1813 let (worktree, relative_path) = self
1814 .find_local_worktree(&abs_path, cx)
1815 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1816 if !worktree.read(cx).is_visible() {
1817 return Ok(());
1818 }
1819
1820 let project_path = ProjectPath {
1821 worktree_id: worktree.read(cx).id(),
1822 path: relative_path.into(),
1823 };
1824
1825 for buffer in self.opened_buffers.values() {
1826 if let Some(buffer) = buffer.upgrade(cx) {
1827 if buffer
1828 .read(cx)
1829 .file()
1830 .map_or(false, |file| *file.path() == project_path.path)
1831 {
1832 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1833 break;
1834 }
1835 }
1836 }
1837 worktree.update(cx, |worktree, cx| {
1838 worktree
1839 .as_local_mut()
1840 .ok_or_else(|| anyhow!("not a local worktree"))?
1841 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1842 })?;
1843 cx.emit(Event::DiagnosticsUpdated(project_path));
1844 Ok(())
1845 }
1846
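    /// Applies a new set of diagnostics to a single buffer: sorts them, maps
    /// disk-based diagnostics through the buffer's unsaved edits, and clips
    /// their ranges against the snapshot matching the reported LSP version.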
1847 fn update_buffer_diagnostics(
1848 &mut self,
1849 buffer: &ModelHandle<Buffer>,
1850 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1851 version: Option<i32>,
1852 cx: &mut ModelContext<Self>,
1853 ) -> Result<()> {
1854 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1855 Ordering::Equal
1856 .then_with(|| b.is_primary.cmp(&a.is_primary))
1857 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1858 .then_with(|| a.severity.cmp(&b.severity))
1859 .then_with(|| a.message.cmp(&b.message))
1860 }
1861
1862 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1863
1864 diagnostics.sort_unstable_by(|a, b| {
1865 Ordering::Equal
1866 .then_with(|| a.range.start.cmp(&b.range.start))
1867 .then_with(|| b.range.end.cmp(&a.range.end))
1868 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1869 });
1870
1871 let mut sanitized_diagnostics = Vec::new();
1872 let edits_since_save = Patch::new(
1873 snapshot
1874 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1875 .collect(),
1876 );
1877 for entry in diagnostics {
1878 let start;
1879 let end;
1880 if entry.diagnostic.is_disk_based {
1881 // Some diagnostics are based on files on disk instead of buffers'
1882 // current contents. Adjust these diagnostics' ranges to reflect
1883 // any unsaved edits.
1884 start = edits_since_save.old_to_new(entry.range.start);
1885 end = edits_since_save.old_to_new(entry.range.end);
1886 } else {
1887 start = entry.range.start;
1888 end = entry.range.end;
1889 }
1890
1891 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1892 ..snapshot.clip_point_utf16(end, Bias::Right);
1893
1894 // Expand empty ranges by one character
1895 if range.start == range.end {
1896 range.end.column += 1;
1897 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1898 if range.start == range.end && range.end.column > 0 {
1899 range.start.column -= 1;
1900 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1901 }
1902 }
1903
1904 sanitized_diagnostics.push(DiagnosticEntry {
1905 range,
1906 diagnostic: entry.diagnostic,
1907 });
1908 }
1909 drop(edits_since_save);
1910
1911 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1912 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1913 Ok(())
1914 }
1915
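    /// Formats the given buffers and returns the resulting transactions, keyed
    /// by buffer. Local buffers are formatted by sending LSP formatting
    /// requests directly; buffers belonging to a remote project are formatted
    /// by asking the host over RPC.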
1916 pub fn format(
1917 &self,
1918 buffers: HashSet<ModelHandle<Buffer>>,
1919 push_to_history: bool,
1920 cx: &mut ModelContext<Project>,
1921 ) -> Task<Result<ProjectTransaction>> {
1922 let mut local_buffers = Vec::new();
1923 let mut remote_buffers = None;
1924 for buffer_handle in buffers {
1925 let buffer = buffer_handle.read(cx);
1926 if let Some(file) = File::from_dyn(buffer.file()) {
1927 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1928 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1929 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1930 }
1931 } else {
1932 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1933 }
1934 } else {
1935 return Task::ready(Ok(Default::default()));
1936 }
1937 }
1938
1939 let remote_buffers = self.remote_id().zip(remote_buffers);
1940 let client = self.client.clone();
1941
1942 cx.spawn(|this, mut cx| async move {
1943 let mut project_transaction = ProjectTransaction::default();
1944
1945 if let Some((project_id, remote_buffers)) = remote_buffers {
1946 let response = client
1947 .request(proto::FormatBuffers {
1948 project_id,
1949 buffer_ids: remote_buffers
1950 .iter()
1951 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1952 .collect(),
1953 })
1954 .await?
1955 .transaction
1956 .ok_or_else(|| anyhow!("missing transaction"))?;
1957 project_transaction = this
1958 .update(&mut cx, |this, cx| {
1959 this.deserialize_project_transaction(response, push_to_history, cx)
1960 })
1961 .await?;
1962 }
1963
1964 for (buffer, buffer_abs_path, language_server) in local_buffers {
1965 let text_document = lsp::TextDocumentIdentifier::new(
1966 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1967 );
1968 let capabilities = &language_server.capabilities();
1969 let lsp_edits = if capabilities
1970 .document_formatting_provider
1971 .as_ref()
1972 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1973 {
1974 language_server
1975 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1976 text_document,
1977 options: Default::default(),
1978 work_done_progress_params: Default::default(),
1979 })
1980 .await?
1981 } else if capabilities
1982 .document_range_formatting_provider
1983 .as_ref()
1984 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1985 {
1986 let buffer_start = lsp::Position::new(0, 0);
1987 let buffer_end = buffer
1988 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1989 .to_lsp_position();
1990 language_server
1991 .request::<lsp::request::RangeFormatting>(
1992 lsp::DocumentRangeFormattingParams {
1993 text_document,
1994 range: lsp::Range::new(buffer_start, buffer_end),
1995 options: Default::default(),
1996 work_done_progress_params: Default::default(),
1997 },
1998 )
1999 .await?
2000 } else {
2001 continue;
2002 };
2003
2004 if let Some(lsp_edits) = lsp_edits {
2005 let edits = this
2006 .update(&mut cx, |this, cx| {
2007 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2008 })
2009 .await?;
2010 buffer.update(&mut cx, |buffer, cx| {
2011 buffer.finalize_last_transaction();
2012 buffer.start_transaction();
2013 for (range, text) in edits {
2014 buffer.edit([range], text, cx);
2015 }
2016 if buffer.end_transaction(cx).is_some() {
2017 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2018 if !push_to_history {
2019 buffer.forget_transaction(transaction.id);
2020 }
2021 project_transaction.0.insert(cx.handle(), transaction);
2022 }
2023 });
2024 }
2025 }
2026
2027 Ok(project_transaction)
2028 })
2029 }
2030
2031 pub fn definition<T: ToPointUtf16>(
2032 &self,
2033 buffer: &ModelHandle<Buffer>,
2034 position: T,
2035 cx: &mut ModelContext<Self>,
2036 ) -> Task<Result<Vec<Location>>> {
2037 let position = position.to_point_utf16(buffer.read(cx));
2038 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2039 }
2040
2041 pub fn references<T: ToPointUtf16>(
2042 &self,
2043 buffer: &ModelHandle<Buffer>,
2044 position: T,
2045 cx: &mut ModelContext<Self>,
2046 ) -> Task<Result<Vec<Location>>> {
2047 let position = position.to_point_utf16(buffer.read(cx));
2048 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2049 }
2050
2051 pub fn document_highlights<T: ToPointUtf16>(
2052 &self,
2053 buffer: &ModelHandle<Buffer>,
2054 position: T,
2055 cx: &mut ModelContext<Self>,
2056 ) -> Task<Result<Vec<DocumentHighlight>>> {
2057 let position = position.to_point_utf16(buffer.read(cx));
2058
2059 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2060 }
2061
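/// Searches for workspace symbols matching `query`. On a local project the
/// request is sent to each running language server and the results are mapped
/// back to worktree-relative paths; on a remote project the query is
/// forwarded to the host over RPC.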
2062 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2063 if self.is_local() {
2064 let mut language_servers = HashMap::default();
2065 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2066 if let Some((worktree, language)) = self
2067 .worktree_for_id(*worktree_id, cx)
2068 .and_then(|worktree| worktree.read(cx).as_local())
2069 .zip(self.languages.get_language(language_name))
2070 {
2071 language_servers
2072 .entry(Arc::as_ptr(language_server))
2073 .or_insert((
2074 language_server.clone(),
2075 *worktree_id,
2076 worktree.abs_path().clone(),
2077 language.clone(),
2078 ));
2079 }
2080 }
2081
2082 let mut requests = Vec::new();
2083 for (language_server, _, _, _) in language_servers.values() {
2084 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2085 lsp::WorkspaceSymbolParams {
2086 query: query.to_string(),
2087 ..Default::default()
2088 },
2089 ));
2090 }
2091
2092 cx.spawn_weak(|this, cx| async move {
2093 let responses = futures::future::try_join_all(requests).await?;
2094
2095 let mut symbols = Vec::new();
2096 if let Some(this) = this.upgrade(&cx) {
2097 this.read_with(&cx, |this, cx| {
2098 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2099 language_servers.into_values().zip(responses)
2100 {
2101 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2102 |lsp_symbol| {
2103 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2104 let mut worktree_id = source_worktree_id;
2105 let path;
2106 if let Some((worktree, rel_path)) =
2107 this.find_local_worktree(&abs_path, cx)
2108 {
2109 worktree_id = worktree.read(cx).id();
2110 path = rel_path;
2111 } else {
2112 path = relativize_path(&worktree_abs_path, &abs_path);
2113 }
2114
2115 let label = language
2116 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2117 .unwrap_or_else(|| {
2118 CodeLabel::plain(lsp_symbol.name.clone(), None)
2119 });
2120 let signature = this.symbol_signature(worktree_id, &path);
2121
2122 Some(Symbol {
2123 source_worktree_id,
2124 worktree_id,
2125 language_name: language.name().to_string(),
2126 name: lsp_symbol.name,
2127 kind: lsp_symbol.kind,
2128 label,
2129 path,
2130 range: range_from_lsp(lsp_symbol.location.range),
2131 signature,
2132 })
2133 },
2134 ));
2135 }
2136 })
2137 }
2138
2139 Ok(symbols)
2140 })
2141 } else if let Some(project_id) = self.remote_id() {
2142 let request = self.client.request(proto::GetProjectSymbols {
2143 project_id,
2144 query: query.to_string(),
2145 });
2146 cx.spawn_weak(|this, cx| async move {
2147 let response = request.await?;
2148 let mut symbols = Vec::new();
2149 if let Some(this) = this.upgrade(&cx) {
2150 this.read_with(&cx, |this, _| {
2151 symbols.extend(
2152 response
2153 .symbols
2154 .into_iter()
2155 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2156 );
2157 })
2158 }
2159 Ok(symbols)
2160 })
2161 } else {
2162 Task::ready(Ok(Default::default()))
2163 }
2164 }
2165
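/// Opens the buffer containing the given symbol, using the language server
/// that reported it when the project is local, or asking the host over RPC
/// otherwise.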
2166 pub fn open_buffer_for_symbol(
2167 &mut self,
2168 symbol: &Symbol,
2169 cx: &mut ModelContext<Self>,
2170 ) -> Task<Result<ModelHandle<Buffer>>> {
2171 if self.is_local() {
2172 let language_server = if let Some(server) = self.language_servers.get(&(
2173 symbol.source_worktree_id,
2174 Arc::from(symbol.language_name.as_str()),
2175 )) {
2176 server.clone()
2177 } else {
2178 return Task::ready(Err(anyhow!(
2179 "language server for worktree and language not found"
2180 )));
2181 };
2182
2183 let worktree_abs_path = if let Some(worktree_abs_path) = self
2184 .worktree_for_id(symbol.worktree_id, cx)
2185 .and_then(|worktree| worktree.read(cx).as_local())
2186 .map(|local_worktree| local_worktree.abs_path())
2187 {
2188 worktree_abs_path
2189 } else {
2190 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2191 };
2192 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2193 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2194 uri
2195 } else {
2196 return Task::ready(Err(anyhow!("invalid symbol path")));
2197 };
2198
2199 self.open_local_buffer_via_lsp(
2200 symbol_uri,
2201 Arc::from(symbol.language_name.as_str()),
2202 language_server,
2203 cx,
2204 )
2205 } else if let Some(project_id) = self.remote_id() {
2206 let request = self.client.request(proto::OpenBufferForSymbol {
2207 project_id,
2208 symbol: Some(serialize_symbol(symbol)),
2209 });
2210 cx.spawn(|this, mut cx| async move {
2211 let response = request.await?;
2212 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2213 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2214 .await
2215 })
2216 } else {
2217 Task::ready(Err(anyhow!("project does not have a remote id")))
2218 }
2219 }
2220
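/// Requests completions at the given position from the buffer's language
/// server, or from the remote host when the buffer is not local. In the local
/// case, completions without a plain text edit, or whose edit range falls
/// outside the current buffer contents, are dropped.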
2221 pub fn completions<T: ToPointUtf16>(
2222 &self,
2223 source_buffer_handle: &ModelHandle<Buffer>,
2224 position: T,
2225 cx: &mut ModelContext<Self>,
2226 ) -> Task<Result<Vec<Completion>>> {
2227 let source_buffer_handle = source_buffer_handle.clone();
2228 let source_buffer = source_buffer_handle.read(cx);
2229 let buffer_id = source_buffer.remote_id();
2230 let language = source_buffer.language().cloned();
2231 let worktree;
2232 let buffer_abs_path;
2233 if let Some(file) = File::from_dyn(source_buffer.file()) {
2234 worktree = file.worktree.clone();
2235 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2236 } else {
2237 return Task::ready(Ok(Default::default()));
2238 };
2239
2240 let position = position.to_point_utf16(source_buffer);
2241 let anchor = source_buffer.anchor_after(position);
2242
2243 if worktree.read(cx).as_local().is_some() {
2244 let buffer_abs_path = buffer_abs_path.unwrap();
2245 let lang_server =
2246 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2247 server.clone()
2248 } else {
2249 return Task::ready(Ok(Default::default()));
2250 };
2251
2252 cx.spawn(|_, cx| async move {
2253 let completions = lang_server
2254 .request::<lsp::request::Completion>(lsp::CompletionParams {
2255 text_document_position: lsp::TextDocumentPositionParams::new(
2256 lsp::TextDocumentIdentifier::new(
2257 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2258 ),
2259 position.to_lsp_position(),
2260 ),
2261 context: Default::default(),
2262 work_done_progress_params: Default::default(),
2263 partial_result_params: Default::default(),
2264 })
2265 .await
2266 .context("lsp completion request failed")?;
2267
2268 let completions = if let Some(completions) = completions {
2269 match completions {
2270 lsp::CompletionResponse::Array(completions) => completions,
2271 lsp::CompletionResponse::List(list) => list.items,
2272 }
2273 } else {
2274 Default::default()
2275 };
2276
2277 source_buffer_handle.read_with(&cx, |this, _| {
2278 Ok(completions
2279 .into_iter()
2280 .filter_map(|lsp_completion| {
2281 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2282 lsp::CompletionTextEdit::Edit(edit) => {
2283 (range_from_lsp(edit.range), edit.new_text.clone())
2284 }
2285 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2286 log::info!("unsupported insert/replace completion");
2287 return None;
2288 }
2289 };
2290
2291 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2292 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2293 if clipped_start == old_range.start && clipped_end == old_range.end {
2294 Some(Completion {
2295 old_range: this.anchor_before(old_range.start)
2296 ..this.anchor_after(old_range.end),
2297 new_text,
2298 label: language
2299 .as_ref()
2300 .and_then(|l| l.label_for_completion(&lsp_completion))
2301 .unwrap_or_else(|| {
2302 CodeLabel::plain(
2303 lsp_completion.label.clone(),
2304 lsp_completion.filter_text.as_deref(),
2305 )
2306 }),
2307 lsp_completion,
2308 })
2309 } else {
2310 None
2311 }
2312 })
2313 .collect())
2314 })
2315 })
2316 } else if let Some(project_id) = self.remote_id() {
2317 let rpc = self.client.clone();
2318 let message = proto::GetCompletions {
2319 project_id,
2320 buffer_id,
2321 position: Some(language::proto::serialize_anchor(&anchor)),
2322 version: serialize_version(&source_buffer.version()),
2323 };
2324 cx.spawn_weak(|_, mut cx| async move {
2325 let response = rpc.request(message).await?;
2326
2327 source_buffer_handle
2328 .update(&mut cx, |buffer, _| {
2329 buffer.wait_for_version(deserialize_version(response.version))
2330 })
2331 .await;
2332
2333 response
2334 .completions
2335 .into_iter()
2336 .map(|completion| {
2337 language::proto::deserialize_completion(completion, language.as_ref())
2338 })
2339 .collect()
2340 })
2341 } else {
2342 Task::ready(Ok(Default::default()))
2343 }
2344 }
2345
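/// Resolves a confirmed completion with the language server and applies any
/// additional text edits it carries (such as automatically added imports),
/// returning the transaction produced by those edits, if any.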
2346 pub fn apply_additional_edits_for_completion(
2347 &self,
2348 buffer_handle: ModelHandle<Buffer>,
2349 completion: Completion,
2350 push_to_history: bool,
2351 cx: &mut ModelContext<Self>,
2352 ) -> Task<Result<Option<Transaction>>> {
2353 let buffer = buffer_handle.read(cx);
2354 let buffer_id = buffer.remote_id();
2355
2356 if self.is_local() {
2357 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2358 server.clone()
2359 } else {
2360 return Task::ready(Ok(Default::default()));
2361 };
2362
2363 cx.spawn(|this, mut cx| async move {
2364 let resolved_completion = lang_server
2365 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2366 .await?;
2367 if let Some(edits) = resolved_completion.additional_text_edits {
2368 let edits = this
2369 .update(&mut cx, |this, cx| {
2370 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2371 })
2372 .await?;
2373 buffer_handle.update(&mut cx, |buffer, cx| {
2374 buffer.finalize_last_transaction();
2375 buffer.start_transaction();
2376 for (range, text) in edits {
2377 buffer.edit([range], text, cx);
2378 }
2379 let transaction = if buffer.end_transaction(cx).is_some() {
2380 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2381 if !push_to_history {
2382 buffer.forget_transaction(transaction.id);
2383 }
2384 Some(transaction)
2385 } else {
2386 None
2387 };
2388 Ok(transaction)
2389 })
2390 } else {
2391 Ok(None)
2392 }
2393 })
2394 } else if let Some(project_id) = self.remote_id() {
2395 let client = self.client.clone();
2396 cx.spawn(|_, mut cx| async move {
2397 let response = client
2398 .request(proto::ApplyCompletionAdditionalEdits {
2399 project_id,
2400 buffer_id,
2401 completion: Some(language::proto::serialize_completion(&completion)),
2402 })
2403 .await?;
2404
2405 if let Some(transaction) = response.transaction {
2406 let transaction = language::proto::deserialize_transaction(transaction)?;
2407 buffer_handle
2408 .update(&mut cx, |buffer, _| {
2409 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2410 })
2411 .await;
2412 if push_to_history {
2413 buffer_handle.update(&mut cx, |buffer, _| {
2414 buffer.push_transaction(transaction.clone(), Instant::now());
2415 });
2416 }
2417 Ok(Some(transaction))
2418 } else {
2419 Ok(None)
2420 }
2421 })
2422 } else {
2423 Task::ready(Err(anyhow!("project does not have a remote id")))
2424 }
2425 }
2426
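/// Requests code actions for the given range, limited to quickfix and
/// refactor kinds, from the buffer's language server or from the remote host.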
2427 pub fn code_actions<T: ToOffset>(
2428 &self,
2429 buffer_handle: &ModelHandle<Buffer>,
2430 range: Range<T>,
2431 cx: &mut ModelContext<Self>,
2432 ) -> Task<Result<Vec<CodeAction>>> {
2433 let buffer_handle = buffer_handle.clone();
2434 let buffer = buffer_handle.read(cx);
2435 let buffer_id = buffer.remote_id();
2436 let worktree;
2437 let buffer_abs_path;
2438 if let Some(file) = File::from_dyn(buffer.file()) {
2439 worktree = file.worktree.clone();
2440 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2441 } else {
2442 return Task::ready(Ok(Default::default()));
2443 };
2444 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2445
2446 if worktree.read(cx).as_local().is_some() {
2447 let buffer_abs_path = buffer_abs_path.unwrap();
2448 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2449 server.clone()
2450 } else {
2451 return Task::ready(Ok(Default::default()));
2452 };
2453
2454 let lsp_range = lsp::Range::new(
2455 range.start.to_point_utf16(buffer).to_lsp_position(),
2456 range.end.to_point_utf16(buffer).to_lsp_position(),
2457 );
2458 cx.foreground().spawn(async move {
2459 if lang_server.capabilities().code_action_provider.is_none() {
2460 return Ok(Default::default());
2461 }
2462
2463 Ok(lang_server
2464 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2465 text_document: lsp::TextDocumentIdentifier::new(
2466 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2467 ),
2468 range: lsp_range,
2469 work_done_progress_params: Default::default(),
2470 partial_result_params: Default::default(),
2471 context: lsp::CodeActionContext {
2472 diagnostics: Default::default(),
2473 only: Some(vec![
2474 lsp::CodeActionKind::QUICKFIX,
2475 lsp::CodeActionKind::REFACTOR,
2476 lsp::CodeActionKind::REFACTOR_EXTRACT,
2477 ]),
2478 },
2479 })
2480 .await?
2481 .unwrap_or_default()
2482 .into_iter()
2483 .filter_map(|entry| {
2484 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2485 Some(CodeAction {
2486 range: range.clone(),
2487 lsp_action,
2488 })
2489 } else {
2490 None
2491 }
2492 })
2493 .collect())
2494 })
2495 } else if let Some(project_id) = self.remote_id() {
2496 let rpc = self.client.clone();
2497 let version = buffer.version();
2498 cx.spawn_weak(|_, mut cx| async move {
2499 let response = rpc
2500 .request(proto::GetCodeActions {
2501 project_id,
2502 buffer_id,
2503 start: Some(language::proto::serialize_anchor(&range.start)),
2504 end: Some(language::proto::serialize_anchor(&range.end)),
2505 version: serialize_version(&version),
2506 })
2507 .await?;
2508
2509 buffer_handle
2510 .update(&mut cx, |buffer, _| {
2511 buffer.wait_for_version(deserialize_version(response.version))
2512 })
2513 .await;
2514
2515 response
2516 .actions
2517 .into_iter()
2518 .map(language::proto::deserialize_code_action)
2519 .collect()
2520 })
2521 } else {
2522 Task::ready(Ok(Default::default()))
2523 }
2524 }
2525
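/// Applies a code action. When the action carries resolve data, its range is
/// updated and the action is resolved with the language server; otherwise the
/// actions for the range are re-requested and matched by title. Any resulting
/// workspace edit is then applied to the project.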
2526 pub fn apply_code_action(
2527 &self,
2528 buffer_handle: ModelHandle<Buffer>,
2529 mut action: CodeAction,
2530 push_to_history: bool,
2531 cx: &mut ModelContext<Self>,
2532 ) -> Task<Result<ProjectTransaction>> {
2533 if self.is_local() {
2534 let buffer = buffer_handle.read(cx);
2535 let lang_name = if let Some(lang) = buffer.language() {
2536 lang.name()
2537 } else {
2538 return Task::ready(Ok(Default::default()));
2539 };
2540 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2541 server.clone()
2542 } else {
2543 return Task::ready(Ok(Default::default()));
2544 };
2545 let range = action.range.to_point_utf16(buffer);
2546
2547 cx.spawn(|this, mut cx| async move {
2548 if let Some(lsp_range) = action
2549 .lsp_action
2550 .data
2551 .as_mut()
2552 .and_then(|d| d.get_mut("codeActionParams"))
2553 .and_then(|d| d.get_mut("range"))
2554 {
2555 *lsp_range = serde_json::to_value(&lsp::Range::new(
2556 range.start.to_lsp_position(),
2557 range.end.to_lsp_position(),
2558 ))
2559 .unwrap();
2560 action.lsp_action = lang_server
2561 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2562 .await?;
2563 } else {
2564 let actions = this
2565 .update(&mut cx, |this, cx| {
2566 this.code_actions(&buffer_handle, action.range, cx)
2567 })
2568 .await?;
2569 action.lsp_action = actions
2570 .into_iter()
2571 .find(|a| a.lsp_action.title == action.lsp_action.title)
2572 .ok_or_else(|| anyhow!("code action is outdated"))?
2573 .lsp_action;
2574 }
2575
2576 if let Some(edit) = action.lsp_action.edit {
2577 Self::deserialize_workspace_edit(
2578 this,
2579 edit,
2580 push_to_history,
2581 lang_name,
2582 lang_server,
2583 &mut cx,
2584 )
2585 .await
2586 } else {
2587 Ok(ProjectTransaction::default())
2588 }
2589 })
2590 } else if let Some(project_id) = self.remote_id() {
2591 let client = self.client.clone();
2592 let request = proto::ApplyCodeAction {
2593 project_id,
2594 buffer_id: buffer_handle.read(cx).remote_id(),
2595 action: Some(language::proto::serialize_code_action(&action)),
2596 };
2597 cx.spawn(|this, mut cx| async move {
2598 let response = client
2599 .request(request)
2600 .await?
2601 .transaction
2602 .ok_or_else(|| anyhow!("missing transaction"))?;
2603 this.update(&mut cx, |this, cx| {
2604 this.deserialize_project_transaction(response, push_to_history, cx)
2605 })
2606 .await
2607 })
2608 } else {
2609 Task::ready(Err(anyhow!("project does not have a remote id")))
2610 }
2611 }
2612
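/// Applies an LSP workspace edit to the project: create, rename, and delete
/// resource operations are performed on disk, and text document edits are
/// applied to the corresponding buffers, with the resulting transactions
/// collected into a `ProjectTransaction`.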
2613 async fn deserialize_workspace_edit(
2614 this: ModelHandle<Self>,
2615 edit: lsp::WorkspaceEdit,
2616 push_to_history: bool,
2617 language_name: Arc<str>,
2618 language_server: Arc<LanguageServer>,
2619 cx: &mut AsyncAppContext,
2620 ) -> Result<ProjectTransaction> {
2621 let fs = this.read_with(cx, |this, _| this.fs.clone());
2622 let mut operations = Vec::new();
2623 if let Some(document_changes) = edit.document_changes {
2624 match document_changes {
2625 lsp::DocumentChanges::Edits(edits) => {
2626 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2627 }
2628 lsp::DocumentChanges::Operations(ops) => operations = ops,
2629 }
2630 } else if let Some(changes) = edit.changes {
2631 operations.extend(changes.into_iter().map(|(uri, edits)| {
2632 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2633 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2634 uri,
2635 version: None,
2636 },
2637 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2638 })
2639 }));
2640 }
2641
2642 let mut project_transaction = ProjectTransaction::default();
2643 for operation in operations {
2644 match operation {
2645 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2646 let abs_path = op
2647 .uri
2648 .to_file_path()
2649 .map_err(|_| anyhow!("can't convert URI to path"))?;
2650
2651 if let Some(parent_path) = abs_path.parent() {
2652 fs.create_dir(parent_path).await?;
2653 }
2654 if abs_path.ends_with("/") {
2655 fs.create_dir(&abs_path).await?;
2656 } else {
2657 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2658 .await?;
2659 }
2660 }
2661 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2662 let source_abs_path = op
2663 .old_uri
2664 .to_file_path()
2665 .map_err(|_| anyhow!("can't convert URI to path"))?;
2666 let target_abs_path = op
2667 .new_uri
2668 .to_file_path()
2669 .map_err(|_| anyhow!("can't convert URI to path"))?;
2670 fs.rename(
2671 &source_abs_path,
2672 &target_abs_path,
2673 op.options.map(Into::into).unwrap_or_default(),
2674 )
2675 .await?;
2676 }
2677 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2678 let abs_path = op
2679 .uri
2680 .to_file_path()
2681 .map_err(|_| anyhow!("can't convert URI to path"))?;
2682 let options = op.options.map(Into::into).unwrap_or_default();
2683 if abs_path.ends_with("/") {
2684 fs.remove_dir(&abs_path, options).await?;
2685 } else {
2686 fs.remove_file(&abs_path, options).await?;
2687 }
2688 }
2689 lsp::DocumentChangeOperation::Edit(op) => {
2690 let buffer_to_edit = this
2691 .update(cx, |this, cx| {
2692 this.open_local_buffer_via_lsp(
2693 op.text_document.uri,
2694 language_name.clone(),
2695 language_server.clone(),
2696 cx,
2697 )
2698 })
2699 .await?;
2700
2701 let edits = this
2702 .update(cx, |this, cx| {
2703 let edits = op.edits.into_iter().map(|edit| match edit {
2704 lsp::OneOf::Left(edit) => edit,
2705 lsp::OneOf::Right(edit) => edit.text_edit,
2706 });
2707 this.edits_from_lsp(
2708 &buffer_to_edit,
2709 edits,
2710 op.text_document.version,
2711 cx,
2712 )
2713 })
2714 .await?;
2715
2716 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2717 buffer.finalize_last_transaction();
2718 buffer.start_transaction();
2719 for (range, text) in edits {
2720 buffer.edit([range], text, cx);
2721 }
2722 let transaction = if buffer.end_transaction(cx).is_some() {
2723 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2724 if !push_to_history {
2725 buffer.forget_transaction(transaction.id);
2726 }
2727 Some(transaction)
2728 } else {
2729 None
2730 };
2731
2732 transaction
2733 });
2734 if let Some(transaction) = transaction {
2735 project_transaction.0.insert(buffer_to_edit, transaction);
2736 }
2737 }
2738 }
2739 }
2740
2741 Ok(project_transaction)
2742 }
2743
2744 pub fn prepare_rename<T: ToPointUtf16>(
2745 &self,
2746 buffer: ModelHandle<Buffer>,
2747 position: T,
2748 cx: &mut ModelContext<Self>,
2749 ) -> Task<Result<Option<Range<Anchor>>>> {
2750 let position = position.to_point_utf16(buffer.read(cx));
2751 self.request_lsp(buffer, PrepareRename { position }, cx)
2752 }
2753
2754 pub fn perform_rename<T: ToPointUtf16>(
2755 &self,
2756 buffer: ModelHandle<Buffer>,
2757 position: T,
2758 new_name: String,
2759 push_to_history: bool,
2760 cx: &mut ModelContext<Self>,
2761 ) -> Task<Result<ProjectTransaction>> {
2762 let position = position.to_point_utf16(buffer.read(cx));
2763 self.request_lsp(
2764 buffer,
2765 PerformRename {
2766 position,
2767 new_name,
2768 push_to_history,
2769 },
2770 cx,
2771 )
2772 }
2773
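/// Searches the project for `query`. On a local project, visible worktree
/// files are scanned in parallel to find candidate paths, the matching
/// buffers are opened, and their contents are searched on background threads;
/// on a remote project, the search is delegated to the host over RPC.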
2774 pub fn search(
2775 &self,
2776 query: SearchQuery,
2777 cx: &mut ModelContext<Self>,
2778 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2779 if self.is_local() {
2780 let snapshots = self
2781 .visible_worktrees(cx)
2782 .filter_map(|tree| {
2783 let tree = tree.read(cx).as_local()?;
2784 Some(tree.snapshot())
2785 })
2786 .collect::<Vec<_>>();
2787
2788 let background = cx.background().clone();
2789 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2790 if path_count == 0 {
2791 return Task::ready(Ok(Default::default()));
2792 }
2793 let workers = background.num_cpus().min(path_count);
2794 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2795 cx.background()
2796 .spawn({
2797 let fs = self.fs.clone();
2798 let background = cx.background().clone();
2799 let query = query.clone();
2800 async move {
2801 let fs = &fs;
2802 let query = &query;
2803 let matching_paths_tx = &matching_paths_tx;
2804 let paths_per_worker = (path_count + workers - 1) / workers;
2805 let snapshots = &snapshots;
2806 background
2807 .scoped(|scope| {
2808 for worker_ix in 0..workers {
2809 let worker_start_ix = worker_ix * paths_per_worker;
2810 let worker_end_ix = worker_start_ix + paths_per_worker;
2811 scope.spawn(async move {
2812 let mut snapshot_start_ix = 0;
2813 let mut abs_path = PathBuf::new();
2814 for snapshot in snapshots {
2815 let snapshot_end_ix =
2816 snapshot_start_ix + snapshot.visible_file_count();
2817 if worker_end_ix <= snapshot_start_ix {
2818 break;
2819 } else if worker_start_ix > snapshot_end_ix {
2820 snapshot_start_ix = snapshot_end_ix;
2821 continue;
2822 } else {
2823 let start_in_snapshot = worker_start_ix
2824 .saturating_sub(snapshot_start_ix);
2825 let end_in_snapshot =
2826 cmp::min(worker_end_ix, snapshot_end_ix)
2827 - snapshot_start_ix;
2828
2829 for entry in snapshot
2830 .files(false, start_in_snapshot)
2831 .take(end_in_snapshot - start_in_snapshot)
2832 {
2833 if matching_paths_tx.is_closed() {
2834 break;
2835 }
2836
2837 abs_path.clear();
2838 abs_path.push(&snapshot.abs_path());
2839 abs_path.push(&entry.path);
2840 let matches = if let Some(file) =
2841 fs.open_sync(&abs_path).await.log_err()
2842 {
2843 query.detect(file).unwrap_or(false)
2844 } else {
2845 false
2846 };
2847
2848 if matches {
2849 let project_path =
2850 (snapshot.id(), entry.path.clone());
2851 if matching_paths_tx
2852 .send(project_path)
2853 .await
2854 .is_err()
2855 {
2856 break;
2857 }
2858 }
2859 }
2860
2861 snapshot_start_ix = snapshot_end_ix;
2862 }
2863 }
2864 });
2865 }
2866 })
2867 .await;
2868 }
2869 })
2870 .detach();
2871
2872 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2873 let open_buffers = self
2874 .opened_buffers
2875 .values()
2876 .filter_map(|b| b.upgrade(cx))
2877 .collect::<HashSet<_>>();
2878 cx.spawn(|this, cx| async move {
2879 for buffer in &open_buffers {
2880 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2881 buffers_tx.send((buffer.clone(), snapshot)).await?;
2882 }
2883
2884 let open_buffers = Rc::new(RefCell::new(open_buffers));
2885 while let Some(project_path) = matching_paths_rx.next().await {
2886 if buffers_tx.is_closed() {
2887 break;
2888 }
2889
2890 let this = this.clone();
2891 let open_buffers = open_buffers.clone();
2892 let buffers_tx = buffers_tx.clone();
2893 cx.spawn(|mut cx| async move {
2894 if let Some(buffer) = this
2895 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2896 .await
2897 .log_err()
2898 {
2899 if open_buffers.borrow_mut().insert(buffer.clone()) {
2900 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2901 buffers_tx.send((buffer, snapshot)).await?;
2902 }
2903 }
2904
2905 Ok::<_, anyhow::Error>(())
2906 })
2907 .detach();
2908 }
2909
2910 Ok::<_, anyhow::Error>(())
2911 })
2912 .detach_and_log_err(cx);
2913
2914 let background = cx.background().clone();
2915 cx.background().spawn(async move {
2916 let query = &query;
2917 let mut matched_buffers = Vec::new();
2918 for _ in 0..workers {
2919 matched_buffers.push(HashMap::default());
2920 }
2921 background
2922 .scoped(|scope| {
2923 for worker_matched_buffers in matched_buffers.iter_mut() {
2924 let mut buffers_rx = buffers_rx.clone();
2925 scope.spawn(async move {
2926 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2927 let buffer_matches = query
2928 .search(snapshot.as_rope())
2929 .await
2930 .iter()
2931 .map(|range| {
2932 snapshot.anchor_before(range.start)
2933 ..snapshot.anchor_after(range.end)
2934 })
2935 .collect::<Vec<_>>();
2936 if !buffer_matches.is_empty() {
2937 worker_matched_buffers
2938 .insert(buffer.clone(), buffer_matches);
2939 }
2940 }
2941 });
2942 }
2943 })
2944 .await;
2945 Ok(matched_buffers.into_iter().flatten().collect())
2946 })
2947 } else if let Some(project_id) = self.remote_id() {
2948 let request = self.client.request(query.to_proto(project_id));
2949 cx.spawn(|this, mut cx| async move {
2950 let response = request.await?;
2951 let mut result = HashMap::default();
2952 for location in response.locations {
2953 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2954 let target_buffer = this
2955 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2956 .await?;
2957 let start = location
2958 .start
2959 .and_then(deserialize_anchor)
2960 .ok_or_else(|| anyhow!("missing target start"))?;
2961 let end = location
2962 .end
2963 .and_then(deserialize_anchor)
2964 .ok_or_else(|| anyhow!("missing target end"))?;
2965 result
2966 .entry(target_buffer)
2967 .or_insert_with(Vec::new)
2968 .push(start..end)
2969 }
2970 Ok(result)
2971 })
2972 } else {
2973 Task::ready(Ok(Default::default()))
2974 }
2975 }
2976
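/// Dispatches a typed LSP request for the given buffer, either to the local
/// language server or, on a remote project, to the host over RPC. Falls back
/// to a default (empty) response when no server or host can handle the
/// request.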
2977 fn request_lsp<R: LspCommand>(
2978 &self,
2979 buffer_handle: ModelHandle<Buffer>,
2980 request: R,
2981 cx: &mut ModelContext<Self>,
2982 ) -> Task<Result<R::Response>>
2983 where
2984 <R::LspRequest as lsp::request::Request>::Result: Send,
2985 {
2986 let buffer = buffer_handle.read(cx);
2987 if self.is_local() {
2988 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2989 if let Some((file, language_server)) =
2990 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2991 {
2992 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2993 return cx.spawn(|this, cx| async move {
2994 if !request.check_capabilities(&language_server.capabilities()) {
2995 return Ok(Default::default());
2996 }
2997
2998 let response = language_server
2999 .request::<R::LspRequest>(lsp_params)
3000 .await
3001 .context("lsp request failed")?;
3002 request
3003 .response_from_lsp(response, this, buffer_handle, cx)
3004 .await
3005 });
3006 }
3007 } else if let Some(project_id) = self.remote_id() {
3008 let rpc = self.client.clone();
3009 let message = request.to_proto(project_id, buffer);
3010 return cx.spawn(|this, cx| async move {
3011 let response = rpc.request(message).await?;
3012 request
3013 .response_from_proto(response, this, buffer_handle, cx)
3014 .await
3015 });
3016 }
3017 Task::ready(Ok(Default::default()))
3018 }
3019
3020 pub fn find_or_create_local_worktree(
3021 &mut self,
3022 abs_path: impl AsRef<Path>,
3023 visible: bool,
3024 cx: &mut ModelContext<Self>,
3025 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3026 let abs_path = abs_path.as_ref();
3027 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3028 Task::ready(Ok((tree.clone(), relative_path.into())))
3029 } else {
3030 let worktree = self.create_local_worktree(abs_path, visible, cx);
3031 cx.foreground()
3032 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3033 }
3034 }
3035
3036 pub fn find_local_worktree(
3037 &self,
3038 abs_path: &Path,
3039 cx: &AppContext,
3040 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3041 for tree in self.worktrees(cx) {
3042 if let Some(relative_path) = tree
3043 .read(cx)
3044 .as_local()
3045 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3046 {
3047 return Some((tree.clone(), relative_path.into()));
3048 }
3049 }
3050 None
3051 }
3052
3053 pub fn is_shared(&self) -> bool {
3054 match &self.client_state {
3055 ProjectClientState::Local { is_shared, .. } => *is_shared,
3056 ProjectClientState::Remote { .. } => false,
3057 }
3058 }
3059
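/// Creates a local worktree for the given path, or joins an in-flight load of
/// the same path, and registers or shares the new worktree with the server
/// when the project has a remote id.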
3060 fn create_local_worktree(
3061 &mut self,
3062 abs_path: impl AsRef<Path>,
3063 visible: bool,
3064 cx: &mut ModelContext<Self>,
3065 ) -> Task<Result<ModelHandle<Worktree>>> {
3066 let fs = self.fs.clone();
3067 let client = self.client.clone();
3068 let next_entry_id = self.next_entry_id.clone();
3069 let path: Arc<Path> = abs_path.as_ref().into();
3070 let task = self
3071 .loading_local_worktrees
3072 .entry(path.clone())
3073 .or_insert_with(|| {
3074 cx.spawn(|project, mut cx| {
3075 async move {
3076 let worktree = Worktree::local(
3077 client.clone(),
3078 path.clone(),
3079 visible,
3080 fs,
3081 next_entry_id,
3082 &mut cx,
3083 )
3084 .await;
3085 project.update(&mut cx, |project, _| {
3086 project.loading_local_worktrees.remove(&path);
3087 });
3088 let worktree = worktree?;
3089
3090 let (remote_project_id, is_shared) =
3091 project.update(&mut cx, |project, cx| {
3092 project.add_worktree(&worktree, cx);
3093 (project.remote_id(), project.is_shared())
3094 });
3095
3096 if let Some(project_id) = remote_project_id {
3097 if is_shared {
3098 worktree
3099 .update(&mut cx, |worktree, cx| {
3100 worktree.as_local_mut().unwrap().share(project_id, cx)
3101 })
3102 .await?;
3103 } else {
3104 worktree
3105 .update(&mut cx, |worktree, cx| {
3106 worktree.as_local_mut().unwrap().register(project_id, cx)
3107 })
3108 .await?;
3109 }
3110 }
3111
3112 Ok(worktree)
3113 }
3114 .map_err(Arc::new)
3115 })
3116 .shared()
3117 })
3118 .clone();
3119 cx.foreground().spawn(async move {
3120 match task.await {
3121 Ok(worktree) => Ok(worktree),
3122 Err(err) => Err(anyhow!("{}", err)),
3123 }
3124 })
3125 }
3126
3127 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3128 self.worktrees.retain(|worktree| {
3129 worktree
3130 .upgrade(cx)
3131 .map_or(false, |w| w.read(cx).id() != id)
3132 });
3133 cx.notify();
3134 }
3135
3136 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3137 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3138 if worktree.read(cx).is_local() {
3139 cx.subscribe(&worktree, |this, worktree, _, cx| {
3140 this.update_local_worktree_buffers(worktree, cx);
3141 })
3142 .detach();
3143 }
3144
3145 let push_strong_handle = {
3146 let worktree = worktree.read(cx);
3147 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3148 };
3149 if push_strong_handle {
3150 self.worktrees
3151 .push(WorktreeHandle::Strong(worktree.clone()));
3152 } else {
3153 cx.observe_release(&worktree, |this, _, cx| {
3154 this.worktrees
3155 .retain(|worktree| worktree.upgrade(cx).is_some());
3156 cx.notify();
3157 })
3158 .detach();
3159 self.worktrees
3160 .push(WorktreeHandle::Weak(worktree.downgrade()));
3161 }
3162 cx.notify();
3163 }
3164
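/// Reconciles open buffers with a local worktree's latest snapshot, giving
/// each affected buffer an updated `File` (marked as deleted when its entry
/// no longer exists), notifying collaborators of the change, and pruning
/// buffers that have been dropped.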
3165 fn update_local_worktree_buffers(
3166 &mut self,
3167 worktree_handle: ModelHandle<Worktree>,
3168 cx: &mut ModelContext<Self>,
3169 ) {
3170 let snapshot = worktree_handle.read(cx).snapshot();
3171 let mut buffers_to_delete = Vec::new();
3172 for (buffer_id, buffer) in &self.opened_buffers {
3173 if let Some(buffer) = buffer.upgrade(cx) {
3174 buffer.update(cx, |buffer, cx| {
3175 if let Some(old_file) = File::from_dyn(buffer.file()) {
3176 if old_file.worktree != worktree_handle {
3177 return;
3178 }
3179
3180 let new_file = if let Some(entry) = old_file
3181 .entry_id
3182 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3183 {
3184 File {
3185 is_local: true,
3186 entry_id: Some(entry.id),
3187 mtime: entry.mtime,
3188 path: entry.path.clone(),
3189 worktree: worktree_handle.clone(),
3190 }
3191 } else if let Some(entry) =
3192 snapshot.entry_for_path(old_file.path().as_ref())
3193 {
3194 File {
3195 is_local: true,
3196 entry_id: Some(entry.id),
3197 mtime: entry.mtime,
3198 path: entry.path.clone(),
3199 worktree: worktree_handle.clone(),
3200 }
3201 } else {
3202 File {
3203 is_local: true,
3204 entry_id: None,
3205 path: old_file.path().clone(),
3206 mtime: old_file.mtime(),
3207 worktree: worktree_handle.clone(),
3208 }
3209 };
3210
3211 if let Some(project_id) = self.remote_id() {
3212 self.client
3213 .send(proto::UpdateBufferFile {
3214 project_id,
3215 buffer_id: *buffer_id as u64,
3216 file: Some(new_file.to_proto()),
3217 })
3218 .log_err();
3219 }
3220 buffer.file_updated(Box::new(new_file), cx).detach();
3221 }
3222 });
3223 } else {
3224 buffers_to_delete.push(*buffer_id);
3225 }
3226 }
3227
3228 for buffer_id in buffers_to_delete {
3229 self.opened_buffers.remove(&buffer_id);
3230 }
3231 }
3232
3233 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3234 let new_active_entry = entry.and_then(|project_path| {
3235 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3236 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3237 Some(entry.id)
3238 });
3239 if new_active_entry != self.active_entry {
3240 self.active_entry = new_active_entry;
3241 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3242 }
3243 }
3244
3245 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3246 self.language_servers_with_diagnostics_running > 0
3247 }
3248
3249 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3250 let mut summary = DiagnosticSummary::default();
3251 for (_, path_summary) in self.diagnostic_summaries(cx) {
3252 summary.error_count += path_summary.error_count;
3253 summary.warning_count += path_summary.warning_count;
3254 summary.info_count += path_summary.info_count;
3255 summary.hint_count += path_summary.hint_count;
3256 }
3257 summary
3258 }
3259
3260 pub fn diagnostic_summaries<'a>(
3261 &'a self,
3262 cx: &'a AppContext,
3263 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3264 self.worktrees(cx).flat_map(move |worktree| {
3265 let worktree = worktree.read(cx);
3266 let worktree_id = worktree.id();
3267 worktree
3268 .diagnostic_summaries()
3269 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3270 })
3271 }
3272
3273 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3274 self.language_servers_with_diagnostics_running += 1;
3275 if self.language_servers_with_diagnostics_running == 1 {
3276 cx.emit(Event::DiskBasedDiagnosticsStarted);
3277 }
3278 }
3279
3280 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3281 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3282 self.language_servers_with_diagnostics_running -= 1;
3283 if self.language_servers_with_diagnostics_running == 0 {
3284 cx.emit(Event::DiskBasedDiagnosticsFinished);
3285 }
3286 }
3287
3288 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3289 self.active_entry
3290 }
3291
3292 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3293 self.worktree_for_id(path.worktree_id, cx)?
3294 .read(cx)
3295 .entry_for_path(&path.path)
3296 .map(|entry| entry.id)
3297 }
3298
3299 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3300 let worktree = self.worktree_for_entry(entry_id, cx)?;
3301 let worktree = worktree.read(cx);
3302 let worktree_id = worktree.id();
3303 let path = worktree.entry_for_id(entry_id)?.path.clone();
3304 Some(ProjectPath { worktree_id, path })
3305 }
3306
3307 // RPC message handlers
3308
3309 async fn handle_unshare_project(
3310 this: ModelHandle<Self>,
3311 _: TypedEnvelope<proto::UnshareProject>,
3312 _: Arc<Client>,
3313 mut cx: AsyncAppContext,
3314 ) -> Result<()> {
3315 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3316 Ok(())
3317 }
3318
3319 async fn handle_add_collaborator(
3320 this: ModelHandle<Self>,
3321 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3322 _: Arc<Client>,
3323 mut cx: AsyncAppContext,
3324 ) -> Result<()> {
3325 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3326 let collaborator = envelope
3327 .payload
3328 .collaborator
3329 .take()
3330 .ok_or_else(|| anyhow!("empty collaborator"))?;
3331
3332 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3333 this.update(&mut cx, |this, cx| {
3334 this.collaborators
3335 .insert(collaborator.peer_id, collaborator);
3336 cx.notify();
3337 });
3338
3339 Ok(())
3340 }
3341
3342 async fn handle_remove_collaborator(
3343 this: ModelHandle<Self>,
3344 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3345 _: Arc<Client>,
3346 mut cx: AsyncAppContext,
3347 ) -> Result<()> {
3348 this.update(&mut cx, |this, cx| {
3349 let peer_id = PeerId(envelope.payload.peer_id);
3350 let replica_id = this
3351 .collaborators
3352 .remove(&peer_id)
3353 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3354 .replica_id;
3355 for (_, buffer) in &this.opened_buffers {
3356 if let Some(buffer) = buffer.upgrade(cx) {
3357 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3358 }
3359 }
3360 cx.emit(Event::CollaboratorLeft(peer_id));
3361 cx.notify();
3362 Ok(())
3363 })
3364 }
3365
3366 async fn handle_register_worktree(
3367 this: ModelHandle<Self>,
3368 envelope: TypedEnvelope<proto::RegisterWorktree>,
3369 client: Arc<Client>,
3370 mut cx: AsyncAppContext,
3371 ) -> Result<()> {
3372 this.update(&mut cx, |this, cx| {
3373 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3374 let replica_id = this.replica_id();
3375 let worktree = proto::Worktree {
3376 id: envelope.payload.worktree_id,
3377 root_name: envelope.payload.root_name,
3378 entries: Default::default(),
3379 diagnostic_summaries: Default::default(),
3380 visible: envelope.payload.visible,
3381 };
3382 let (worktree, load_task) =
3383 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3384 this.add_worktree(&worktree, cx);
3385 load_task.detach();
3386 Ok(())
3387 })
3388 }
3389
3390 async fn handle_unregister_worktree(
3391 this: ModelHandle<Self>,
3392 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3393 _: Arc<Client>,
3394 mut cx: AsyncAppContext,
3395 ) -> Result<()> {
3396 this.update(&mut cx, |this, cx| {
3397 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3398 this.remove_worktree(worktree_id, cx);
3399 Ok(())
3400 })
3401 }
3402
3403 async fn handle_update_worktree(
3404 this: ModelHandle<Self>,
3405 envelope: TypedEnvelope<proto::UpdateWorktree>,
3406 _: Arc<Client>,
3407 mut cx: AsyncAppContext,
3408 ) -> Result<()> {
3409 this.update(&mut cx, |this, cx| {
3410 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3411 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3412 worktree.update(cx, |worktree, _| {
3413 let worktree = worktree.as_remote_mut().unwrap();
3414 worktree.update_from_remote(envelope)
3415 })?;
3416 }
3417 Ok(())
3418 })
3419 }
3420
3421 async fn handle_update_diagnostic_summary(
3422 this: ModelHandle<Self>,
3423 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3424 _: Arc<Client>,
3425 mut cx: AsyncAppContext,
3426 ) -> Result<()> {
3427 this.update(&mut cx, |this, cx| {
3428 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3429 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3430 if let Some(summary) = envelope.payload.summary {
3431 let project_path = ProjectPath {
3432 worktree_id,
3433 path: Path::new(&summary.path).into(),
3434 };
3435 worktree.update(cx, |worktree, _| {
3436 worktree
3437 .as_remote_mut()
3438 .unwrap()
3439 .update_diagnostic_summary(project_path.path.clone(), &summary);
3440 });
3441 cx.emit(Event::DiagnosticsUpdated(project_path));
3442 }
3443 }
3444 Ok(())
3445 })
3446 }
3447
3448 async fn handle_start_language_server(
3449 this: ModelHandle<Self>,
3450 envelope: TypedEnvelope<proto::StartLanguageServer>,
3451 _: Arc<Client>,
3452 mut cx: AsyncAppContext,
3453 ) -> Result<()> {
3454 let server = envelope
3455 .payload
3456 .server
3457 .ok_or_else(|| anyhow!("invalid server"))?;
3458 this.update(&mut cx, |this, cx| {
3459 this.language_server_statuses.insert(
3460 server.id as usize,
3461 LanguageServerStatus {
3462 name: server.name,
3463 pending_work: Default::default(),
3464 pending_diagnostic_updates: 0,
3465 },
3466 );
3467 cx.notify();
3468 });
3469 Ok(())
3470 }
3471
3472 async fn handle_update_language_server(
3473 this: ModelHandle<Self>,
3474 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3475 _: Arc<Client>,
3476 mut cx: AsyncAppContext,
3477 ) -> Result<()> {
3478 let language_server_id = envelope.payload.language_server_id as usize;
3479 match envelope
3480 .payload
3481 .variant
3482 .ok_or_else(|| anyhow!("invalid variant"))?
3483 {
3484 proto::update_language_server::Variant::WorkStart(payload) => {
3485 this.update(&mut cx, |this, cx| {
3486 this.on_lsp_work_start(language_server_id, payload.token, cx);
3487 })
3488 }
3489 proto::update_language_server::Variant::WorkProgress(payload) => {
3490 this.update(&mut cx, |this, cx| {
3491 this.on_lsp_work_progress(
3492 language_server_id,
3493 payload.token,
3494 LanguageServerProgress {
3495 message: payload.message,
3496 percentage: payload.percentage.map(|p| p as usize),
3497 last_update_at: Instant::now(),
3498 },
3499 cx,
3500 );
3501 })
3502 }
3503 proto::update_language_server::Variant::WorkEnd(payload) => {
3504 this.update(&mut cx, |this, cx| {
3505 this.on_lsp_work_end(language_server_id, payload.token, cx);
3506 })
3507 }
3508 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3509 this.update(&mut cx, |this, cx| {
3510 this.disk_based_diagnostics_started(cx);
3511 })
3512 }
3513 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3514 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3515 }
3516 }
3517
3518 Ok(())
3519 }
3520
3521 async fn handle_update_buffer(
3522 this: ModelHandle<Self>,
3523 envelope: TypedEnvelope<proto::UpdateBuffer>,
3524 _: Arc<Client>,
3525 mut cx: AsyncAppContext,
3526 ) -> Result<()> {
3527 this.update(&mut cx, |this, cx| {
3528 let payload = envelope.payload.clone();
3529 let buffer_id = payload.buffer_id;
3530 let ops = payload
3531 .operations
3532 .into_iter()
3533 .map(|op| language::proto::deserialize_operation(op))
3534 .collect::<Result<Vec<_>, _>>()?;
3535 match this.opened_buffers.entry(buffer_id) {
3536 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3537 OpenBuffer::Strong(buffer) => {
3538 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3539 }
3540 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3541 OpenBuffer::Weak(_) => {}
3542 },
3543 hash_map::Entry::Vacant(e) => {
3544 e.insert(OpenBuffer::Loading(ops));
3545 }
3546 }
3547 Ok(())
3548 })
3549 }
3550
3551 async fn handle_update_buffer_file(
3552 this: ModelHandle<Self>,
3553 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3554 _: Arc<Client>,
3555 mut cx: AsyncAppContext,
3556 ) -> Result<()> {
3557 this.update(&mut cx, |this, cx| {
3558 let payload = envelope.payload.clone();
3559 let buffer_id = payload.buffer_id;
3560 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3561 let worktree = this
3562 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3563 .ok_or_else(|| anyhow!("no such worktree"))?;
3564 let file = File::from_proto(file, worktree.clone(), cx)?;
3565 let buffer = this
3566 .opened_buffers
3567 .get_mut(&buffer_id)
3568 .and_then(|b| b.upgrade(cx))
3569 .ok_or_else(|| anyhow!("no such buffer"))?;
3570 buffer.update(cx, |buffer, cx| {
3571 buffer.file_updated(Box::new(file), cx).detach();
3572 });
3573 Ok(())
3574 })
3575 }
3576
3577 async fn handle_save_buffer(
3578 this: ModelHandle<Self>,
3579 envelope: TypedEnvelope<proto::SaveBuffer>,
3580 _: Arc<Client>,
3581 mut cx: AsyncAppContext,
3582 ) -> Result<proto::BufferSaved> {
3583 let buffer_id = envelope.payload.buffer_id;
3584 let requested_version = deserialize_version(envelope.payload.version);
3585
3586 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3587 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3588 let buffer = this
3589 .opened_buffers
3590 .get(&buffer_id)
3591 .map(|buffer| buffer.upgrade(cx).unwrap())
3592 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3593 Ok::<_, anyhow::Error>((project_id, buffer))
3594 })?;
3595 buffer
3596 .update(&mut cx, |buffer, _| {
3597 buffer.wait_for_version(requested_version)
3598 })
3599 .await;
3600
3601 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3602 Ok(proto::BufferSaved {
3603 project_id,
3604 buffer_id,
3605 version: serialize_version(&saved_version),
3606 mtime: Some(mtime.into()),
3607 })
3608 }
3609
3610 async fn handle_format_buffers(
3611 this: ModelHandle<Self>,
3612 envelope: TypedEnvelope<proto::FormatBuffers>,
3613 _: Arc<Client>,
3614 mut cx: AsyncAppContext,
3615 ) -> Result<proto::FormatBuffersResponse> {
3616 let sender_id = envelope.original_sender_id()?;
3617 let format = this.update(&mut cx, |this, cx| {
3618 let mut buffers = HashSet::default();
3619 for buffer_id in &envelope.payload.buffer_ids {
3620 buffers.insert(
3621 this.opened_buffers
3622 .get(buffer_id)
3623 .map(|buffer| buffer.upgrade(cx).unwrap())
3624 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3625 );
3626 }
3627 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3628 })?;
3629
3630 let project_transaction = format.await?;
3631 let project_transaction = this.update(&mut cx, |this, cx| {
3632 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3633 });
3634 Ok(proto::FormatBuffersResponse {
3635 transaction: Some(project_transaction),
3636 })
3637 }
3638
3639 async fn handle_get_completions(
3640 this: ModelHandle<Self>,
3641 envelope: TypedEnvelope<proto::GetCompletions>,
3642 _: Arc<Client>,
3643 mut cx: AsyncAppContext,
3644 ) -> Result<proto::GetCompletionsResponse> {
3645 let position = envelope
3646 .payload
3647 .position
3648 .and_then(language::proto::deserialize_anchor)
3649 .ok_or_else(|| anyhow!("invalid position"))?;
3650 let version = deserialize_version(envelope.payload.version);
3651 let buffer = this.read_with(&cx, |this, cx| {
3652 this.opened_buffers
3653 .get(&envelope.payload.buffer_id)
3654 .map(|buffer| buffer.upgrade(cx).unwrap())
3655 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3656 })?;
3657 buffer
3658 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3659 .await;
3660 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3661 let completions = this
3662 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3663 .await?;
3664
3665 Ok(proto::GetCompletionsResponse {
3666 completions: completions
3667 .iter()
3668 .map(language::proto::serialize_completion)
3669 .collect(),
3670 version: serialize_version(&version),
3671 })
3672 }
3673
3674 async fn handle_apply_additional_edits_for_completion(
3675 this: ModelHandle<Self>,
3676 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3677 _: Arc<Client>,
3678 mut cx: AsyncAppContext,
3679 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3680 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3681 let buffer = this
3682 .opened_buffers
3683 .get(&envelope.payload.buffer_id)
3684 .map(|buffer| buffer.upgrade(cx).unwrap())
3685 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3686 let language = buffer.read(cx).language();
3687 let completion = language::proto::deserialize_completion(
3688 envelope
3689 .payload
3690 .completion
3691 .ok_or_else(|| anyhow!("invalid completion"))?,
3692 language,
3693 )?;
3694 Ok::<_, anyhow::Error>(
3695 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3696 )
3697 })?;
3698
3699 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3700 transaction: apply_additional_edits
3701 .await?
3702 .as_ref()
3703 .map(language::proto::serialize_transaction),
3704 })
3705 }
3706
3707 async fn handle_get_code_actions(
3708 this: ModelHandle<Self>,
3709 envelope: TypedEnvelope<proto::GetCodeActions>,
3710 _: Arc<Client>,
3711 mut cx: AsyncAppContext,
3712 ) -> Result<proto::GetCodeActionsResponse> {
3713 let start = envelope
3714 .payload
3715 .start
3716 .and_then(language::proto::deserialize_anchor)
3717 .ok_or_else(|| anyhow!("invalid start"))?;
3718 let end = envelope
3719 .payload
3720 .end
3721 .and_then(language::proto::deserialize_anchor)
3722 .ok_or_else(|| anyhow!("invalid end"))?;
3723 let buffer = this.update(&mut cx, |this, cx| {
3724 this.opened_buffers
3725 .get(&envelope.payload.buffer_id)
3726 .map(|buffer| buffer.upgrade(cx).unwrap())
3727 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3728 })?;
3729 buffer
3730 .update(&mut cx, |buffer, _| {
3731 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3732 })
3733 .await;
3734
3735 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3736 let code_actions = this.update(&mut cx, |this, cx| {
3737 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3738 })?;
3739
3740 Ok(proto::GetCodeActionsResponse {
3741 actions: code_actions
3742 .await?
3743 .iter()
3744 .map(language::proto::serialize_code_action)
3745 .collect(),
3746 version: serialize_version(&version),
3747 })
3748 }
3749
3750 async fn handle_apply_code_action(
3751 this: ModelHandle<Self>,
3752 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3753 _: Arc<Client>,
3754 mut cx: AsyncAppContext,
3755 ) -> Result<proto::ApplyCodeActionResponse> {
3756 let sender_id = envelope.original_sender_id()?;
3757 let action = language::proto::deserialize_code_action(
3758 envelope
3759 .payload
3760 .action
3761 .ok_or_else(|| anyhow!("invalid action"))?,
3762 )?;
3763 let apply_code_action = this.update(&mut cx, |this, cx| {
3764 let buffer = this
3765 .opened_buffers
3766 .get(&envelope.payload.buffer_id)
3767 .map(|buffer| buffer.upgrade(cx).unwrap())
3768 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3769 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3770 })?;
3771
3772 let project_transaction = apply_code_action.await?;
3773 let project_transaction = this.update(&mut cx, |this, cx| {
3774 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3775 });
3776 Ok(proto::ApplyCodeActionResponse {
3777 transaction: Some(project_transaction),
3778 })
3779 }
3780
3781 async fn handle_lsp_command<T: LspCommand>(
3782 this: ModelHandle<Self>,
3783 envelope: TypedEnvelope<T::ProtoRequest>,
3784 _: Arc<Client>,
3785 mut cx: AsyncAppContext,
3786 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3787 where
3788 <T::LspRequest as lsp::request::Request>::Result: Send,
3789 {
3790 let sender_id = envelope.original_sender_id()?;
3791 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3792 let buffer_handle = this.read_with(&cx, |this, _| {
3793 this.opened_buffers
3794 .get(&buffer_id)
3795 .and_then(|buffer| buffer.upgrade(&cx))
3796 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3797 })?;
3798 let request = T::from_proto(
3799 envelope.payload,
3800 this.clone(),
3801 buffer_handle.clone(),
3802 cx.clone(),
3803 )
3804 .await?;
3805 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3806 let response = this
3807 .update(&mut cx, |this, cx| {
3808 this.request_lsp(buffer_handle, request, cx)
3809 })
3810 .await?;
3811 this.update(&mut cx, |this, cx| {
3812 Ok(T::response_to_proto(
3813 response,
3814 this,
3815 sender_id,
3816 &buffer_version,
3817 cx,
3818 ))
3819 })
3820 }
3821
3822 async fn handle_get_project_symbols(
3823 this: ModelHandle<Self>,
3824 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3825 _: Arc<Client>,
3826 mut cx: AsyncAppContext,
3827 ) -> Result<proto::GetProjectSymbolsResponse> {
3828 let symbols = this
3829 .update(&mut cx, |this, cx| {
3830 this.symbols(&envelope.payload.query, cx)
3831 })
3832 .await?;
3833
3834 Ok(proto::GetProjectSymbolsResponse {
3835 symbols: symbols.iter().map(serialize_symbol).collect(),
3836 })
3837 }
3838
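    // Performs a project-wide search on behalf of a peer, serializing each buffer that
    // contains a match so the peer can resolve the returned locations.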
3839 async fn handle_search_project(
3840 this: ModelHandle<Self>,
3841 envelope: TypedEnvelope<proto::SearchProject>,
3842 _: Arc<Client>,
3843 mut cx: AsyncAppContext,
3844 ) -> Result<proto::SearchProjectResponse> {
3845 let peer_id = envelope.original_sender_id()?;
3846 let query = SearchQuery::from_proto(envelope.payload)?;
3847 let result = this
3848 .update(&mut cx, |this, cx| this.search(query, cx))
3849 .await?;
3850
3851 this.update(&mut cx, |this, cx| {
3852 let mut locations = Vec::new();
3853 for (buffer, ranges) in result {
3854 for range in ranges {
3855 let start = serialize_anchor(&range.start);
3856 let end = serialize_anchor(&range.end);
3857 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3858 locations.push(proto::Location {
3859 buffer: Some(buffer),
3860 start: Some(start),
3861 end: Some(end),
3862 });
3863 }
3864 }
3865 Ok(proto::SearchProjectResponse { locations })
3866 })
3867 }
3868
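    // Opens the buffer containing a symbol for a peer, first verifying the symbol's
    // signature so that peers can only request symbols this project previously sent them.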
3869 async fn handle_open_buffer_for_symbol(
3870 this: ModelHandle<Self>,
3871 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3872 _: Arc<Client>,
3873 mut cx: AsyncAppContext,
3874 ) -> Result<proto::OpenBufferForSymbolResponse> {
3875 let peer_id = envelope.original_sender_id()?;
3876 let symbol = envelope
3877 .payload
3878 .symbol
3879 .ok_or_else(|| anyhow!("invalid symbol"))?;
3880 let symbol = this.read_with(&cx, |this, _| {
3881 let symbol = this.deserialize_symbol(symbol)?;
3882 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3883 if signature == symbol.signature {
3884 Ok(symbol)
3885 } else {
3886 Err(anyhow!("invalid symbol signature"))
3887 }
3888 })?;
3889 let buffer = this
3890 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3891 .await?;
3892
3893 Ok(proto::OpenBufferForSymbolResponse {
3894 buffer: Some(this.update(&mut cx, |this, cx| {
3895 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3896 })),
3897 })
3898 }
3899
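    // Produces a SHA-256 digest over the worktree id, path, and this project's private
    // nonce, used to verify that symbols echoed back by peers originated here.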
3900 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3901 let mut hasher = Sha256::new();
3902 hasher.update(worktree_id.to_proto().to_be_bytes());
3903 hasher.update(path.to_string_lossy().as_bytes());
3904 hasher.update(self.nonce.to_be_bytes());
3905 hasher.finalize().as_slice().try_into().unwrap()
3906 }
3907
3908 async fn handle_open_buffer_by_id(
3909 this: ModelHandle<Self>,
3910 envelope: TypedEnvelope<proto::OpenBufferById>,
3911 _: Arc<Client>,
3912 mut cx: AsyncAppContext,
3913 ) -> Result<proto::OpenBufferResponse> {
3914 let peer_id = envelope.original_sender_id()?;
3915 let buffer = this
3916 .update(&mut cx, |this, cx| {
3917 this.open_buffer_by_id(envelope.payload.id, cx)
3918 })
3919 .await?;
3920 this.update(&mut cx, |this, cx| {
3921 Ok(proto::OpenBufferResponse {
3922 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3923 })
3924 })
3925 }
3926
3927 async fn handle_open_buffer_by_path(
3928 this: ModelHandle<Self>,
3929 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3930 _: Arc<Client>,
3931 mut cx: AsyncAppContext,
3932 ) -> Result<proto::OpenBufferResponse> {
3933 let peer_id = envelope.original_sender_id()?;
3934 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3935 let open_buffer = this.update(&mut cx, |this, cx| {
3936 this.open_buffer(
3937 ProjectPath {
3938 worktree_id,
3939 path: PathBuf::from(envelope.payload.path).into(),
3940 },
3941 cx,
3942 )
3943 });
3944
3945 let buffer = open_buffer.await?;
3946 this.update(&mut cx, |this, cx| {
3947 Ok(proto::OpenBufferResponse {
3948 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3949 })
3950 })
3951 }
3952
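    // Converts a project transaction into its protobuf form, sending the full state of any
    // buffer that hasn't yet been shared with the given peer.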
3953 fn serialize_project_transaction_for_peer(
3954 &mut self,
3955 project_transaction: ProjectTransaction,
3956 peer_id: PeerId,
3957 cx: &AppContext,
3958 ) -> proto::ProjectTransaction {
3959 let mut serialized_transaction = proto::ProjectTransaction {
3960 buffers: Default::default(),
3961 transactions: Default::default(),
3962 };
3963 for (buffer, transaction) in project_transaction.0 {
3964 serialized_transaction
3965 .buffers
3966 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3967 serialized_transaction
3968 .transactions
3969 .push(language::proto::serialize_transaction(&transaction));
3970 }
3971 serialized_transaction
3972 }
3973
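    // Reconstructs a project transaction received from a peer, resolving each buffer,
    // waiting for the referenced edits to arrive, and optionally pushing the transactions
    // onto the buffers' undo histories.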
3974 fn deserialize_project_transaction(
3975 &mut self,
3976 message: proto::ProjectTransaction,
3977 push_to_history: bool,
3978 cx: &mut ModelContext<Self>,
3979 ) -> Task<Result<ProjectTransaction>> {
3980 cx.spawn(|this, mut cx| async move {
3981 let mut project_transaction = ProjectTransaction::default();
3982 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3983 let buffer = this
3984 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3985 .await?;
3986 let transaction = language::proto::deserialize_transaction(transaction)?;
3987 project_transaction.0.insert(buffer, transaction);
3988 }
3989
3990 for (buffer, transaction) in &project_transaction.0 {
3991 buffer
3992 .update(&mut cx, |buffer, _| {
3993 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3994 })
3995 .await;
3996
3997 if push_to_history {
3998 buffer.update(&mut cx, |buffer, _| {
3999 buffer.push_transaction(transaction.clone(), Instant::now());
4000 });
4001 }
4002 }
4003
4004 Ok(project_transaction)
4005 })
4006 }
4007
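    // Sends the buffer's full state the first time it is shared with a given peer;
    // subsequent messages refer to the buffer by id only.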
4008 fn serialize_buffer_for_peer(
4009 &mut self,
4010 buffer: &ModelHandle<Buffer>,
4011 peer_id: PeerId,
4012 cx: &AppContext,
4013 ) -> proto::Buffer {
4014 let buffer_id = buffer.read(cx).remote_id();
4015 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4016 if shared_buffers.insert(buffer_id) {
4017 proto::Buffer {
4018 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4019 }
4020 } else {
4021 proto::Buffer {
4022 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4023 }
4024 }
4025 }
4026
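    // Resolves a buffer received from a peer: either waits for a buffer with the given id
    // to finish opening locally, or constructs a new buffer from the provided state.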
4027 fn deserialize_buffer(
4028 &mut self,
4029 buffer: proto::Buffer,
4030 cx: &mut ModelContext<Self>,
4031 ) -> Task<Result<ModelHandle<Buffer>>> {
4032 let replica_id = self.replica_id();
4033
4034 let opened_buffer_tx = self.opened_buffer.0.clone();
4035 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4036 cx.spawn(|this, mut cx| async move {
4037 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4038 proto::buffer::Variant::Id(id) => {
4039 let buffer = loop {
4040 let buffer = this.read_with(&cx, |this, cx| {
4041 this.opened_buffers
4042 .get(&id)
4043 .and_then(|buffer| buffer.upgrade(cx))
4044 });
4045 if let Some(buffer) = buffer {
4046 break buffer;
4047 }
4048 opened_buffer_rx
4049 .next()
4050 .await
4051 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4052 };
4053 Ok(buffer)
4054 }
4055 proto::buffer::Variant::State(mut buffer) => {
4056 let mut buffer_worktree = None;
4057 let mut buffer_file = None;
4058 if let Some(file) = buffer.file.take() {
4059 this.read_with(&cx, |this, cx| {
4060 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4061 let worktree =
4062 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4063 anyhow!("no worktree found for id {}", file.worktree_id)
4064 })?;
4065 buffer_file =
4066 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4067 as Box<dyn language::File>);
4068 buffer_worktree = Some(worktree);
4069 Ok::<_, anyhow::Error>(())
4070 })?;
4071 }
4072
4073 let buffer = cx.add_model(|cx| {
4074 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4075 });
4076
4077 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4078
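                    // Signal that a new buffer has been registered, waking any tasks that
                    // are waiting for a buffer with this id to appear.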
4079 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4080 Ok(buffer)
4081 }
4082 }
4083 })
4084 }
4085
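    // Rebuilds a `Symbol` from its protobuf form, using the language registry to compute a
    // display label when the symbol's language is available.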
4086 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4087 let language = self
4088 .languages
4089 .get_language(&serialized_symbol.language_name);
4090 let start = serialized_symbol
4091 .start
4092 .ok_or_else(|| anyhow!("invalid start"))?;
4093 let end = serialized_symbol
4094 .end
4095 .ok_or_else(|| anyhow!("invalid end"))?;
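        // This transmute assumes the peer sent a valid `lsp::SymbolKind` value.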
4096 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4097 Ok(Symbol {
4098 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4099 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4100 language_name: serialized_symbol.language_name.clone(),
4101 label: language
4102 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4103 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4104 name: serialized_symbol.name,
4105 path: PathBuf::from(serialized_symbol.path),
4106 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4107 kind,
4108 signature: serialized_symbol
4109 .signature
4110 .try_into()
4111 .map_err(|_| anyhow!("invalid signature"))?,
4112 })
4113 }
4114
4115 async fn handle_buffer_saved(
4116 this: ModelHandle<Self>,
4117 envelope: TypedEnvelope<proto::BufferSaved>,
4118 _: Arc<Client>,
4119 mut cx: AsyncAppContext,
4120 ) -> Result<()> {
4121 let version = deserialize_version(envelope.payload.version);
4122 let mtime = envelope
4123 .payload
4124 .mtime
4125 .ok_or_else(|| anyhow!("missing mtime"))?
4126 .into();
4127
4128 this.update(&mut cx, |this, cx| {
4129 let buffer = this
4130 .opened_buffers
4131 .get(&envelope.payload.buffer_id)
4132 .and_then(|buffer| buffer.upgrade(cx));
4133 if let Some(buffer) = buffer {
4134 buffer.update(cx, |buffer, cx| {
4135 buffer.did_save(version, mtime, None, cx);
4136 });
4137 }
4138 Ok(())
4139 })
4140 }
4141
4142 async fn handle_buffer_reloaded(
4143 this: ModelHandle<Self>,
4144 envelope: TypedEnvelope<proto::BufferReloaded>,
4145 _: Arc<Client>,
4146 mut cx: AsyncAppContext,
4147 ) -> Result<()> {
4148 let payload = envelope.payload.clone();
4149 let version = deserialize_version(payload.version);
4150 let mtime = payload
4151 .mtime
4152 .ok_or_else(|| anyhow!("missing mtime"))?
4153 .into();
4154 this.update(&mut cx, |this, cx| {
4155 let buffer = this
4156 .opened_buffers
4157 .get(&payload.buffer_id)
4158 .and_then(|buffer| buffer.upgrade(cx));
4159 if let Some(buffer) = buffer {
4160 buffer.update(cx, |buffer, cx| {
4161 buffer.did_reload(version, mtime, cx);
4162 });
4163 }
4164 Ok(())
4165 })
4166 }
4167
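    // Fuzzy-matches the query against the paths of all visible worktrees, returning up to
    // `max_results` matches. Root names are included in the candidate paths only when more
    // than one worktree is visible.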
4168 pub fn match_paths<'a>(
4169 &self,
4170 query: &'a str,
4171 include_ignored: bool,
4172 smart_case: bool,
4173 max_results: usize,
4174 cancel_flag: &'a AtomicBool,
4175 cx: &AppContext,
4176 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4177 let worktrees = self
4178 .worktrees(cx)
4179 .filter(|worktree| worktree.read(cx).is_visible())
4180 .collect::<Vec<_>>();
4181 let include_root_name = worktrees.len() > 1;
4182 let candidate_sets = worktrees
4183 .into_iter()
4184 .map(|worktree| CandidateSet {
4185 snapshot: worktree.read(cx).snapshot(),
4186 include_ignored,
4187 include_root_name,
4188 })
4189 .collect::<Vec<_>>();
4190
4191 let background = cx.background().clone();
4192 async move {
4193 fuzzy::match_paths(
4194 candidate_sets.as_slice(),
4195 query,
4196 smart_case,
4197 max_results,
4198 cancel_flag,
4199 background,
4200 )
4201 .await
4202 }
4203 }
4204
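    // Converts a batch of LSP text edits into anchored edits against the snapshot that was
    // sent to the language server at the given document version (or the current snapshot
    // when no version is provided).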
4205 fn edits_from_lsp(
4206 &mut self,
4207 buffer: &ModelHandle<Buffer>,
4208 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4209 version: Option<i32>,
4210 cx: &mut ModelContext<Self>,
4211 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4212 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4213 cx.background().spawn(async move {
4214 let snapshot = snapshot?;
4215 let mut lsp_edits = lsp_edits
4216 .into_iter()
4217 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4218 .peekable();
4219
4220 let mut edits = Vec::new();
4221 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4222 // Combine any LSP edits that are adjacent.
4223 //
4224 // Also, combine LSP edits that are separated from each other by only
4225 // a newline. This is important because for some code actions,
4226 // Rust-analyzer rewrites the entire buffer via a series of edits that
4227 // are separated by unchanged newline characters.
4228 //
4229 // In order for the diffing logic below to work properly, any edits that
4230 // cancel each other out must be combined into one.
4231 while let Some((next_range, next_text)) = lsp_edits.peek() {
4232 if next_range.start > range.end {
4233 if next_range.start.row > range.end.row + 1
4234 || next_range.start.column > 0
4235 || snapshot.clip_point_utf16(
4236 PointUtf16::new(range.end.row, u32::MAX),
4237 Bias::Left,
4238 ) > range.end
4239 {
4240 break;
4241 }
4242 new_text.push('\n');
4243 }
4244 range.end = next_range.end;
4245 new_text.push_str(&next_text);
4246 lsp_edits.next();
4247 }
4248
4249 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4250 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4251 {
4252 return Err(anyhow!("invalid edits received from language server"));
4253 }
4254
4255 // For multiline edits, perform a diff of the old and new text so that
4256 // we can identify the changes more precisely, preserving the locations
4257 // of any anchors positioned in the unchanged regions.
4258 if range.end.row > range.start.row {
4259 let mut offset = range.start.to_offset(&snapshot);
4260 let old_text = snapshot.text_for_range(range).collect::<String>();
4261
4262 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4263 let mut moved_since_edit = true;
4264 for change in diff.iter_all_changes() {
4265 let tag = change.tag();
4266 let value = change.value();
4267 match tag {
4268 ChangeTag::Equal => {
4269 offset += value.len();
4270 moved_since_edit = true;
4271 }
4272 ChangeTag::Delete => {
4273 let start = snapshot.anchor_after(offset);
4274 let end = snapshot.anchor_before(offset + value.len());
4275 if moved_since_edit {
4276 edits.push((start..end, String::new()));
4277 } else {
4278 edits.last_mut().unwrap().0.end = end;
4279 }
4280 offset += value.len();
4281 moved_since_edit = false;
4282 }
4283 ChangeTag::Insert => {
4284 if moved_since_edit {
4285 let anchor = snapshot.anchor_after(offset);
4286 edits.push((anchor.clone()..anchor, value.to_string()));
4287 } else {
4288 edits.last_mut().unwrap().1.push_str(value);
4289 }
4290 moved_since_edit = false;
4291 }
4292 }
4293 }
4294 } else if range.end == range.start {
4295 let anchor = snapshot.anchor_after(range.start);
4296 edits.push((anchor.clone()..anchor, new_text));
4297 } else {
4298 let edit_start = snapshot.anchor_after(range.start);
4299 let edit_end = snapshot.anchor_before(range.end);
4300 edits.push((edit_start..edit_end, new_text));
4301 }
4302 }
4303
4304 Ok(edits)
4305 })
4306 }
4307
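    // Returns the snapshot that was sent to the language server at the given document
    // version, pruning snapshots that are more than `OLD_VERSIONS_TO_RETAIN` versions
    // older than the requested one. Without a version, the current snapshot is returned.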
4308 fn buffer_snapshot_for_lsp_version(
4309 &mut self,
4310 buffer: &ModelHandle<Buffer>,
4311 version: Option<i32>,
4312 cx: &AppContext,
4313 ) -> Result<TextBufferSnapshot> {
4314 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4315
4316 if let Some(version) = version {
4317 let buffer_id = buffer.read(cx).remote_id();
4318 let snapshots = self
4319 .buffer_snapshots
4320 .get_mut(&buffer_id)
4321 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4322 let mut found_snapshot = None;
4323 snapshots.retain(|(snapshot_version, snapshot)| {
4324 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4325 false
4326 } else {
4327 if *snapshot_version == version {
4328 found_snapshot = Some(snapshot.clone());
4329 }
4330 true
4331 }
4332 });
4333
4334 found_snapshot.ok_or_else(|| {
4335 anyhow!(
4336 "snapshot not found for buffer {} at version {}",
4337 buffer_id,
4338 version
4339 )
4340 })
4341 } else {
4342 Ok(buffer.read(cx).text_snapshot())
4343 }
4344 }
4345
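    // Returns the language server associated with the buffer's worktree and language, if
    // the buffer belongs to a worktree and has a known language.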
4346 fn language_server_for_buffer(
4347 &self,
4348 buffer: &Buffer,
4349 cx: &AppContext,
4350 ) -> Option<&Arc<LanguageServer>> {
4351 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4352 let worktree_id = file.worktree_id(cx);
4353 self.language_servers.get(&(worktree_id, language.name()))
4354 } else {
4355 None
4356 }
4357 }
4358}
4359
4360impl WorktreeHandle {
4361 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4362 match self {
4363 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4364 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4365 }
4366 }
4367}
4368
4369impl OpenBuffer {
4370 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4371 match self {
4372 OpenBuffer::Strong(handle) => Some(handle.clone()),
4373 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4374 OpenBuffer::Loading(_) => None,
4375 }
4376 }
4377}
4378
4379struct CandidateSet {
4380 snapshot: Snapshot,
4381 include_ignored: bool,
4382 include_root_name: bool,
4383}
4384
4385impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4386 type Candidates = CandidateSetIter<'a>;
4387
4388 fn id(&self) -> usize {
4389 self.snapshot.id().to_usize()
4390 }
4391
4392 fn len(&self) -> usize {
4393 if self.include_ignored {
4394 self.snapshot.file_count()
4395 } else {
4396 self.snapshot.visible_file_count()
4397 }
4398 }
4399
4400 fn prefix(&self) -> Arc<str> {
4401 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4402 self.snapshot.root_name().into()
4403 } else if self.include_root_name {
4404 format!("{}/", self.snapshot.root_name()).into()
4405 } else {
4406 "".into()
4407 }
4408 }
4409
4410 fn candidates(&'a self, start: usize) -> Self::Candidates {
4411 CandidateSetIter {
4412 traversal: self.snapshot.files(self.include_ignored, start),
4413 }
4414 }
4415}
4416
4417struct CandidateSetIter<'a> {
4418 traversal: Traversal<'a>,
4419}
4420
4421impl<'a> Iterator for CandidateSetIter<'a> {
4422 type Item = PathMatchCandidate<'a>;
4423
4424 fn next(&mut self) -> Option<Self::Item> {
4425 self.traversal.next().map(|entry| {
4426 if let EntryKind::File(char_bag) = entry.kind {
4427 PathMatchCandidate {
4428 path: &entry.path,
4429 char_bag,
4430 }
4431 } else {
4432 unreachable!()
4433 }
4434 })
4435 }
4436}
4437
4438impl Entity for Project {
4439 type Event = Event;
4440
4441 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4442 match &self.client_state {
4443 ProjectClientState::Local { remote_id_rx, .. } => {
4444 if let Some(project_id) = *remote_id_rx.borrow() {
4445 self.client
4446 .send(proto::UnregisterProject { project_id })
4447 .log_err();
4448 }
4449 }
4450 ProjectClientState::Remote { remote_id, .. } => {
4451 self.client
4452 .send(proto::LeaveProject {
4453 project_id: *remote_id,
4454 })
4455 .log_err();
4456 }
4457 }
4458 }
4459
4460 fn app_will_quit(
4461 &mut self,
4462 _: &mut MutableAppContext,
4463 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4464 let shutdown_futures = self
4465 .language_servers
4466 .drain()
4467 .filter_map(|(_, server)| server.shutdown())
4468 .collect::<Vec<_>>();
4469 Some(
4470 async move {
4471 futures::future::join_all(shutdown_futures).await;
4472 }
4473 .boxed(),
4474 )
4475 }
4476}
4477
4478impl Collaborator {
4479 fn from_proto(
4480 message: proto::Collaborator,
4481 user_store: &ModelHandle<UserStore>,
4482 cx: &mut AsyncAppContext,
4483 ) -> impl Future<Output = Result<Self>> {
4484 let user = user_store.update(cx, |user_store, cx| {
4485 user_store.fetch_user(message.user_id, cx)
4486 });
4487
4488 async move {
4489 Ok(Self {
4490 peer_id: PeerId(message.peer_id),
4491 user: user.await?,
4492 replica_id: message.replica_id as ReplicaId,
4493 })
4494 }
4495 }
4496}
4497
4498impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4499 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4500 Self {
4501 worktree_id,
4502 path: path.as_ref().into(),
4503 }
4504 }
4505}
4506
4507impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4508 fn from(options: lsp::CreateFileOptions) -> Self {
4509 Self {
4510 overwrite: options.overwrite.unwrap_or(false),
4511 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4512 }
4513 }
4514}
4515
4516impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4517 fn from(options: lsp::RenameFileOptions) -> Self {
4518 Self {
4519 overwrite: options.overwrite.unwrap_or(false),
4520 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4521 }
4522 }
4523}
4524
4525impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4526 fn from(options: lsp::DeleteFileOptions) -> Self {
4527 Self {
4528 recursive: options.recursive.unwrap_or(false),
4529 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4530 }
4531 }
4532}
4533
4534fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4535 proto::Symbol {
4536 source_worktree_id: symbol.source_worktree_id.to_proto(),
4537 worktree_id: symbol.worktree_id.to_proto(),
4538 language_name: symbol.language_name.clone(),
4539 name: symbol.name.clone(),
4540 kind: unsafe { mem::transmute(symbol.kind) },
4541 path: symbol.path.to_string_lossy().to_string(),
4542 start: Some(proto::Point {
4543 row: symbol.range.start.row,
4544 column: symbol.range.start.column,
4545 }),
4546 end: Some(proto::Point {
4547 row: symbol.range.end.row,
4548 column: symbol.range.end.column,
4549 }),
4550 signature: symbol.signature.to_vec(),
4551 }
4552}
4553
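// Computes the path of `path` relative to `base`, inserting `..` components where the two
// paths diverge.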
4554fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4555 let mut path_components = path.components();
4556 let mut base_components = base.components();
4557 let mut components: Vec<Component> = Vec::new();
4558 loop {
4559 match (path_components.next(), base_components.next()) {
4560 (None, None) => break,
4561 (Some(a), None) => {
4562 components.push(a);
4563 components.extend(path_components.by_ref());
4564 break;
4565 }
4566 (None, _) => components.push(Component::ParentDir),
4567 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4568 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4569 (Some(a), Some(_)) => {
4570 components.push(Component::ParentDir);
4571 for _ in base_components {
4572 components.push(Component::ParentDir);
4573 }
4574 components.push(a);
4575 components.extend(path_components.by_ref());
4576 break;
4577 }
4578 }
4579 }
4580 components.iter().map(|c| c.as_os_str()).collect()
4581}
4582
4583impl Item for Buffer {
4584 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4585 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4586 }
4587}
4588
4589#[cfg(test)]
4590mod tests {
4591 use super::{Event, *};
4592 use fs::RealFs;
4593 use futures::StreamExt;
4594 use gpui::test::subscribe;
4595 use language::{
4596 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4597 ToPoint,
4598 };
4599 use lsp::Url;
4600 use serde_json::json;
4601 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4602 use unindent::Unindent as _;
4603 use util::test::temp_tree;
4604 use worktree::WorktreeHandle as _;
4605
4606 #[gpui::test]
4607 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4608 let dir = temp_tree(json!({
4609 "root": {
4610 "apple": "",
4611 "banana": {
4612 "carrot": {
4613 "date": "",
4614 "endive": "",
4615 }
4616 },
4617 "fennel": {
4618 "grape": "",
4619 }
4620 }
4621 }));
4622
4623 let root_link_path = dir.path().join("root_link");
4624 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4625 unix::fs::symlink(
4626 &dir.path().join("root/fennel"),
4627 &dir.path().join("root/finnochio"),
4628 )
4629 .unwrap();
4630
4631 let project = Project::test(Arc::new(RealFs), cx);
4632
4633 let (tree, _) = project
4634 .update(cx, |project, cx| {
4635 project.find_or_create_local_worktree(&root_link_path, true, cx)
4636 })
4637 .await
4638 .unwrap();
4639
4640 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4641 .await;
4642 cx.read(|cx| {
4643 let tree = tree.read(cx);
4644 assert_eq!(tree.file_count(), 5);
4645 assert_eq!(
4646 tree.inode_for_path("fennel/grape"),
4647 tree.inode_for_path("finnochio/grape")
4648 );
4649 });
4650
4651 let cancel_flag = Default::default();
4652 let results = project
4653 .read_with(cx, |project, cx| {
4654 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4655 })
4656 .await;
4657 assert_eq!(
4658 results
4659 .into_iter()
4660 .map(|result| result.path)
4661 .collect::<Vec<Arc<Path>>>(),
4662 vec![
4663 PathBuf::from("banana/carrot/date").into(),
4664 PathBuf::from("banana/carrot/endive").into(),
4665 ]
4666 );
4667 }
4668
4669 #[gpui::test]
4670 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4671 cx.foreground().forbid_parking();
4672
4673 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4674 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4675 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4676 completion_provider: Some(lsp::CompletionOptions {
4677 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4678 ..Default::default()
4679 }),
4680 ..Default::default()
4681 });
4682 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4683 completion_provider: Some(lsp::CompletionOptions {
4684 trigger_characters: Some(vec![":".to_string()]),
4685 ..Default::default()
4686 }),
4687 ..Default::default()
4688 });
4689
4690 let rust_language = Arc::new(Language::new(
4691 LanguageConfig {
4692 name: "Rust".into(),
4693 path_suffixes: vec!["rs".to_string()],
4694 language_server: rust_lsp_config,
4695 ..Default::default()
4696 },
4697 Some(tree_sitter_rust::language()),
4698 ));
4699 let json_language = Arc::new(Language::new(
4700 LanguageConfig {
4701 name: "JSON".into(),
4702 path_suffixes: vec!["json".to_string()],
4703 language_server: json_lsp_config,
4704 ..Default::default()
4705 },
4706 None,
4707 ));
4708
4709 let fs = FakeFs::new(cx.background());
4710 fs.insert_tree(
4711 "/the-root",
4712 json!({
4713 "test.rs": "const A: i32 = 1;",
4714 "test2.rs": "",
4715 "Cargo.toml": "a = 1",
4716 "package.json": "{\"a\": 1}",
4717 }),
4718 )
4719 .await;
4720
4721 let project = Project::test(fs, cx);
4722 project.update(cx, |project, _| {
4723 project.languages.add(rust_language);
4724 project.languages.add(json_language);
4725 });
4726
4727 let worktree_id = project
4728 .update(cx, |project, cx| {
4729 project.find_or_create_local_worktree("/the-root", true, cx)
4730 })
4731 .await
4732 .unwrap()
4733 .0
4734 .read_with(cx, |tree, _| tree.id());
4735
4736 // Open a buffer without an associated language server.
4737 let toml_buffer = project
4738 .update(cx, |project, cx| {
4739 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4740 })
4741 .await
4742 .unwrap();
4743
4744 // Open a buffer with an associated language server.
4745 let rust_buffer = project
4746 .update(cx, |project, cx| {
4747 project.open_buffer((worktree_id, "test.rs"), cx)
4748 })
4749 .await
4750 .unwrap();
4751
4752 // A server is started up, and it is notified about Rust files.
4753 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4754 assert_eq!(
4755 fake_rust_server
4756 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4757 .await
4758 .text_document,
4759 lsp::TextDocumentItem {
4760 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4761 version: 0,
4762 text: "const A: i32 = 1;".to_string(),
4763 language_id: Default::default()
4764 }
4765 );
4766
4767 // The buffer is configured based on the language server's capabilities.
4768 rust_buffer.read_with(cx, |buffer, _| {
4769 assert_eq!(
4770 buffer.completion_triggers(),
4771 &[".".to_string(), "::".to_string()]
4772 );
4773 });
4774 toml_buffer.read_with(cx, |buffer, _| {
4775 assert!(buffer.completion_triggers().is_empty());
4776 });
4777
4778 // Edit a buffer. The changes are reported to the language server.
4779 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4780 assert_eq!(
4781 fake_rust_server
4782 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4783 .await
4784 .text_document,
4785 lsp::VersionedTextDocumentIdentifier::new(
4786 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4787 1
4788 )
4789 );
4790
4791 // Open a third buffer with a different associated language server.
4792 let json_buffer = project
4793 .update(cx, |project, cx| {
4794 project.open_buffer((worktree_id, "package.json"), cx)
4795 })
4796 .await
4797 .unwrap();
4798
4799 // Another language server is started up, and it is notified about
4800 // all three open buffers.
4801 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4802 assert_eq!(
4803 fake_json_server
4804 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4805 .await
4806 .text_document,
4807 lsp::TextDocumentItem {
4808 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4809 version: 0,
4810 text: "{\"a\": 1}".to_string(),
4811 language_id: Default::default()
4812 }
4813 );
4814
4815 // This buffer is configured based on the second language server's
4816 // capabilities.
4817 json_buffer.read_with(cx, |buffer, _| {
4818 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4819 });
4820
4821 // When opening another buffer whose language server is already running,
4822 // it is also configured based on the existing language server's capabilities.
4823 let rust_buffer2 = project
4824 .update(cx, |project, cx| {
4825 project.open_buffer((worktree_id, "test2.rs"), cx)
4826 })
4827 .await
4828 .unwrap();
4829 rust_buffer2.read_with(cx, |buffer, _| {
4830 assert_eq!(
4831 buffer.completion_triggers(),
4832 &[".".to_string(), "::".to_string()]
4833 );
4834 });
4835
4836 // Changes are reported only to servers matching the buffer's language.
4837 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4838 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4839 assert_eq!(
4840 fake_rust_server
4841 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4842 .await
4843 .text_document,
4844 lsp::VersionedTextDocumentIdentifier::new(
4845 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4846 1
4847 )
4848 );
4849
4850 // Save notifications are reported to all servers.
4851 toml_buffer
4852 .update(cx, |buffer, cx| buffer.save(cx))
4853 .await
4854 .unwrap();
4855 assert_eq!(
4856 fake_rust_server
4857 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4858 .await
4859 .text_document,
4860 lsp::TextDocumentIdentifier::new(
4861 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4862 )
4863 );
4864 assert_eq!(
4865 fake_json_server
4866 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4867 .await
4868 .text_document,
4869 lsp::TextDocumentIdentifier::new(
4870 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4871 )
4872 );
4873
4874 // Close notifications are reported only to servers matching the buffer's language.
4875 cx.update(|_| drop(json_buffer));
4876 let close_message = lsp::DidCloseTextDocumentParams {
4877 text_document: lsp::TextDocumentIdentifier::new(
4878 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4879 ),
4880 };
4881 assert_eq!(
4882 fake_json_server
4883 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4884 .await,
4885 close_message,
4886 );
4887 }
4888
4889 #[gpui::test]
4890 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4891 cx.foreground().forbid_parking();
4892
4893 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4894 let progress_token = language_server_config
4895 .disk_based_diagnostics_progress_token
4896 .clone()
4897 .unwrap();
4898
4899 let language = Arc::new(Language::new(
4900 LanguageConfig {
4901 name: "Rust".into(),
4902 path_suffixes: vec!["rs".to_string()],
4903 language_server: language_server_config,
4904 ..Default::default()
4905 },
4906 Some(tree_sitter_rust::language()),
4907 ));
4908
4909 let fs = FakeFs::new(cx.background());
4910 fs.insert_tree(
4911 "/dir",
4912 json!({
4913 "a.rs": "fn a() { A }",
4914 "b.rs": "const y: i32 = 1",
4915 }),
4916 )
4917 .await;
4918
4919 let project = Project::test(fs, cx);
4920 project.update(cx, |project, _| project.languages.add(language));
4921
4922 let (tree, _) = project
4923 .update(cx, |project, cx| {
4924 project.find_or_create_local_worktree("/dir", true, cx)
4925 })
4926 .await
4927 .unwrap();
4928 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4929
4930 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4931 .await;
4932
4933 // Cause the worktree to start the fake language server.
4934 let _buffer = project
4935 .update(cx, |project, cx| {
4936 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4937 })
4938 .await
4939 .unwrap();
4940
4941 let mut events = subscribe(&project, cx);
4942
4943 let mut fake_server = fake_servers.next().await.unwrap();
4944 fake_server.start_progress(&progress_token).await;
4945 assert_eq!(
4946 events.next().await.unwrap(),
4947 Event::DiskBasedDiagnosticsStarted
4948 );
4949
4950 fake_server.start_progress(&progress_token).await;
4951 fake_server.end_progress(&progress_token).await;
4952 fake_server.start_progress(&progress_token).await;
4953
4954 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4955 lsp::PublishDiagnosticsParams {
4956 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4957 version: None,
4958 diagnostics: vec![lsp::Diagnostic {
4959 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4960 severity: Some(lsp::DiagnosticSeverity::ERROR),
4961 message: "undefined variable 'A'".to_string(),
4962 ..Default::default()
4963 }],
4964 },
4965 );
4966 assert_eq!(
4967 events.next().await.unwrap(),
4968 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4969 );
4970
4971 fake_server.end_progress(&progress_token).await;
4972 fake_server.end_progress(&progress_token).await;
4973 assert_eq!(
4974 events.next().await.unwrap(),
4975 Event::DiskBasedDiagnosticsUpdated
4976 );
4977 assert_eq!(
4978 events.next().await.unwrap(),
4979 Event::DiskBasedDiagnosticsFinished
4980 );
4981
4982 let buffer = project
4983 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4984 .await
4985 .unwrap();
4986
4987 buffer.read_with(cx, |buffer, _| {
4988 let snapshot = buffer.snapshot();
4989 let diagnostics = snapshot
4990 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4991 .collect::<Vec<_>>();
4992 assert_eq!(
4993 diagnostics,
4994 &[DiagnosticEntry {
4995 range: Point::new(0, 9)..Point::new(0, 10),
4996 diagnostic: Diagnostic {
4997 severity: lsp::DiagnosticSeverity::ERROR,
4998 message: "undefined variable 'A'".to_string(),
4999 group_id: 0,
5000 is_primary: true,
5001 ..Default::default()
5002 }
5003 }]
5004 )
5005 });
5006 }
5007
5008 #[gpui::test]
5009 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5010 cx.foreground().forbid_parking();
5011
5012 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5013 lsp_config
5014 .disk_based_diagnostic_sources
5015 .insert("disk".to_string());
5016 let language = Arc::new(Language::new(
5017 LanguageConfig {
5018 name: "Rust".into(),
5019 path_suffixes: vec!["rs".to_string()],
5020 language_server: lsp_config,
5021 ..Default::default()
5022 },
5023 Some(tree_sitter_rust::language()),
5024 ));
5025
5026 let text = "
5027 fn a() { A }
5028 fn b() { BB }
5029 fn c() { CCC }
5030 "
5031 .unindent();
5032
5033 let fs = FakeFs::new(cx.background());
5034 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5035
5036 let project = Project::test(fs, cx);
5037 project.update(cx, |project, _| project.languages.add(language));
5038
5039 let worktree_id = project
5040 .update(cx, |project, cx| {
5041 project.find_or_create_local_worktree("/dir", true, cx)
5042 })
5043 .await
5044 .unwrap()
5045 .0
5046 .read_with(cx, |tree, _| tree.id());
5047
5048 let buffer = project
5049 .update(cx, |project, cx| {
5050 project.open_buffer((worktree_id, "a.rs"), cx)
5051 })
5052 .await
5053 .unwrap();
5054
5055 let mut fake_server = fake_servers.next().await.unwrap();
5056 let open_notification = fake_server
5057 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5058 .await;
5059
5060 // Edit the buffer, moving the content down
5061 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5062 let change_notification_1 = fake_server
5063 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5064 .await;
5065 assert!(
5066 change_notification_1.text_document.version > open_notification.text_document.version
5067 );
5068
5069 // Report some diagnostics for the initial version of the buffer
5070 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5071 lsp::PublishDiagnosticsParams {
5072 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5073 version: Some(open_notification.text_document.version),
5074 diagnostics: vec![
5075 lsp::Diagnostic {
5076 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5077 severity: Some(DiagnosticSeverity::ERROR),
5078 message: "undefined variable 'A'".to_string(),
5079 source: Some("disk".to_string()),
5080 ..Default::default()
5081 },
5082 lsp::Diagnostic {
5083 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5084 severity: Some(DiagnosticSeverity::ERROR),
5085 message: "undefined variable 'BB'".to_string(),
5086 source: Some("disk".to_string()),
5087 ..Default::default()
5088 },
5089 lsp::Diagnostic {
5090 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5091 severity: Some(DiagnosticSeverity::ERROR),
5092 source: Some("disk".to_string()),
5093 message: "undefined variable 'CCC'".to_string(),
5094 ..Default::default()
5095 },
5096 ],
5097 },
5098 );
5099
5100 // The diagnostics have moved down since they were created.
5101 buffer.next_notification(cx).await;
5102 buffer.read_with(cx, |buffer, _| {
5103 assert_eq!(
5104 buffer
5105 .snapshot()
5106 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5107 .collect::<Vec<_>>(),
5108 &[
5109 DiagnosticEntry {
5110 range: Point::new(3, 9)..Point::new(3, 11),
5111 diagnostic: Diagnostic {
5112 severity: DiagnosticSeverity::ERROR,
5113 message: "undefined variable 'BB'".to_string(),
5114 is_disk_based: true,
5115 group_id: 1,
5116 is_primary: true,
5117 ..Default::default()
5118 },
5119 },
5120 DiagnosticEntry {
5121 range: Point::new(4, 9)..Point::new(4, 12),
5122 diagnostic: Diagnostic {
5123 severity: DiagnosticSeverity::ERROR,
5124 message: "undefined variable 'CCC'".to_string(),
5125 is_disk_based: true,
5126 group_id: 2,
5127 is_primary: true,
5128 ..Default::default()
5129 }
5130 }
5131 ]
5132 );
5133 assert_eq!(
5134 chunks_with_diagnostics(buffer, 0..buffer.len()),
5135 [
5136 ("\n\nfn a() { ".to_string(), None),
5137 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5138 (" }\nfn b() { ".to_string(), None),
5139 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5140 (" }\nfn c() { ".to_string(), None),
5141 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5142 (" }\n".to_string(), None),
5143 ]
5144 );
5145 assert_eq!(
5146 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5147 [
5148 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5149 (" }\nfn c() { ".to_string(), None),
5150 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5151 ]
5152 );
5153 });
5154
5155 // Ensure overlapping diagnostics are highlighted correctly.
5156 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5157 lsp::PublishDiagnosticsParams {
5158 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5159 version: Some(open_notification.text_document.version),
5160 diagnostics: vec![
5161 lsp::Diagnostic {
5162 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5163 severity: Some(DiagnosticSeverity::ERROR),
5164 message: "undefined variable 'A'".to_string(),
5165 source: Some("disk".to_string()),
5166 ..Default::default()
5167 },
5168 lsp::Diagnostic {
5169 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5170 severity: Some(DiagnosticSeverity::WARNING),
5171 message: "unreachable statement".to_string(),
5172 source: Some("disk".to_string()),
5173 ..Default::default()
5174 },
5175 ],
5176 },
5177 );
5178
5179 buffer.next_notification(cx).await;
5180 buffer.read_with(cx, |buffer, _| {
5181 assert_eq!(
5182 buffer
5183 .snapshot()
5184 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5185 .collect::<Vec<_>>(),
5186 &[
5187 DiagnosticEntry {
5188 range: Point::new(2, 9)..Point::new(2, 12),
5189 diagnostic: Diagnostic {
5190 severity: DiagnosticSeverity::WARNING,
5191 message: "unreachable statement".to_string(),
5192 is_disk_based: true,
5193 group_id: 1,
5194 is_primary: true,
5195 ..Default::default()
5196 }
5197 },
5198 DiagnosticEntry {
5199 range: Point::new(2, 9)..Point::new(2, 10),
5200 diagnostic: Diagnostic {
5201 severity: DiagnosticSeverity::ERROR,
5202 message: "undefined variable 'A'".to_string(),
5203 is_disk_based: true,
5204 group_id: 0,
5205 is_primary: true,
5206 ..Default::default()
5207 },
5208 }
5209 ]
5210 );
5211 assert_eq!(
5212 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5213 [
5214 ("fn a() { ".to_string(), None),
5215 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5216 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5217 ("\n".to_string(), None),
5218 ]
5219 );
5220 assert_eq!(
5221 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5222 [
5223 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5224 ("\n".to_string(), None),
5225 ]
5226 );
5227 });
5228
5229 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5230 // changes since the last save.
5231 buffer.update(cx, |buffer, cx| {
5232 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5233 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5234 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5235 });
5236 let change_notification_2 = fake_server
5237 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5238 .await;
5239 assert!(
5240 change_notification_2.text_document.version
5241 > change_notification_1.text_document.version
5242 );
5243
5244 // Handle diagnostics whose ranges are reported out of order.
5245 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5246 lsp::PublishDiagnosticsParams {
5247 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5248 version: Some(change_notification_2.text_document.version),
5249 diagnostics: vec![
5250 lsp::Diagnostic {
5251 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5252 severity: Some(DiagnosticSeverity::ERROR),
5253 message: "undefined variable 'BB'".to_string(),
5254 source: Some("disk".to_string()),
5255 ..Default::default()
5256 },
5257 lsp::Diagnostic {
5258 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5259 severity: Some(DiagnosticSeverity::WARNING),
5260 message: "undefined variable 'A'".to_string(),
5261 source: Some("disk".to_string()),
5262 ..Default::default()
5263 },
5264 ],
5265 },
5266 );
5267
5268 buffer.next_notification(cx).await;
5269 buffer.read_with(cx, |buffer, _| {
5270 assert_eq!(
5271 buffer
5272 .snapshot()
5273 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5274 .collect::<Vec<_>>(),
5275 &[
5276 DiagnosticEntry {
5277 range: Point::new(2, 21)..Point::new(2, 22),
5278 diagnostic: Diagnostic {
5279 severity: DiagnosticSeverity::WARNING,
5280 message: "undefined variable 'A'".to_string(),
5281 is_disk_based: true,
5282 group_id: 1,
5283 is_primary: true,
5284 ..Default::default()
5285 }
5286 },
5287 DiagnosticEntry {
5288 range: Point::new(3, 9)..Point::new(3, 14),
5289 diagnostic: Diagnostic {
5290 severity: DiagnosticSeverity::ERROR,
5291 message: "undefined variable 'BB'".to_string(),
5292 is_disk_based: true,
5293 group_id: 0,
5294 is_primary: true,
5295 ..Default::default()
5296 },
5297 }
5298 ]
5299 );
5300 });
5301 }
5302
5303 #[gpui::test]
5304 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5305 cx.foreground().forbid_parking();
5306
5307 let text = concat!(
5308 "let one = ;\n", //
5309 "let two = \n",
5310 "let three = 3;\n",
5311 );
5312
5313 let fs = FakeFs::new(cx.background());
5314 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5315
5316 let project = Project::test(fs, cx);
5317 let worktree_id = project
5318 .update(cx, |project, cx| {
5319 project.find_or_create_local_worktree("/dir", true, cx)
5320 })
5321 .await
5322 .unwrap()
5323 .0
5324 .read_with(cx, |tree, _| tree.id());
5325
5326 let buffer = project
5327 .update(cx, |project, cx| {
5328 project.open_buffer((worktree_id, "a.rs"), cx)
5329 })
5330 .await
5331 .unwrap();
5332
5333 project.update(cx, |project, cx| {
5334 project
5335 .update_buffer_diagnostics(
5336 &buffer,
5337 vec![
5338 DiagnosticEntry {
5339 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5340 diagnostic: Diagnostic {
5341 severity: DiagnosticSeverity::ERROR,
5342 message: "syntax error 1".to_string(),
5343 ..Default::default()
5344 },
5345 },
5346 DiagnosticEntry {
5347 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5348 diagnostic: Diagnostic {
5349 severity: DiagnosticSeverity::ERROR,
5350 message: "syntax error 2".to_string(),
5351 ..Default::default()
5352 },
5353 },
5354 ],
5355 None,
5356 cx,
5357 )
5358 .unwrap();
5359 });
5360
5361 // An empty range is extended forward to include the following character.
5362 // At the end of a line, an empty range is extended backward to include
5363 // the preceding character.
5364 buffer.read_with(cx, |buffer, _| {
5365 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5366 assert_eq!(
5367 chunks
5368 .iter()
5369 .map(|(s, d)| (s.as_str(), *d))
5370 .collect::<Vec<_>>(),
5371 &[
5372 ("let one = ", None),
5373 (";", Some(DiagnosticSeverity::ERROR)),
5374 ("\nlet two =", None),
5375 (" ", Some(DiagnosticSeverity::ERROR)),
5376 ("\nlet three = 3;\n", None)
5377 ]
5378 );
5379 });
5380 }
5381
5382 #[gpui::test]
5383 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5384 cx.foreground().forbid_parking();
5385
5386 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5387 let language = Arc::new(Language::new(
5388 LanguageConfig {
5389 name: "Rust".into(),
5390 path_suffixes: vec!["rs".to_string()],
5391 language_server: lsp_config,
5392 ..Default::default()
5393 },
5394 Some(tree_sitter_rust::language()),
5395 ));
5396
5397 let text = "
5398 fn a() {
5399 f1();
5400 }
5401 fn b() {
5402 f2();
5403 }
5404 fn c() {
5405 f3();
5406 }
5407 "
5408 .unindent();
5409
5410 let fs = FakeFs::new(cx.background());
5411 fs.insert_tree(
5412 "/dir",
5413 json!({
5414 "a.rs": text.clone(),
5415 }),
5416 )
5417 .await;
5418
5419 let project = Project::test(fs, cx);
5420 project.update(cx, |project, _| project.languages.add(language));
5421
5422 let worktree_id = project
5423 .update(cx, |project, cx| {
5424 project.find_or_create_local_worktree("/dir", true, cx)
5425 })
5426 .await
5427 .unwrap()
5428 .0
5429 .read_with(cx, |tree, _| tree.id());
5430
5431 let buffer = project
5432 .update(cx, |project, cx| {
5433 project.open_buffer((worktree_id, "a.rs"), cx)
5434 })
5435 .await
5436 .unwrap();
5437
5438 let mut fake_server = fake_servers.next().await.unwrap();
5439 let lsp_document_version = fake_server
5440 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5441 .await
5442 .text_document
5443 .version;
5444
5445 // Simulate editing the buffer after the language server computes some edits.
5446 buffer.update(cx, |buffer, cx| {
5447 buffer.edit(
5448 [Point::new(0, 0)..Point::new(0, 0)],
5449 "// above first function\n",
5450 cx,
5451 );
5452 buffer.edit(
5453 [Point::new(2, 0)..Point::new(2, 0)],
5454 " // inside first function\n",
5455 cx,
5456 );
5457 buffer.edit(
5458 [Point::new(6, 4)..Point::new(6, 4)],
5459 "// inside second function ",
5460 cx,
5461 );
5462
5463 assert_eq!(
5464 buffer.text(),
5465 "
5466 // above first function
5467 fn a() {
5468 // inside first function
5469 f1();
5470 }
5471 fn b() {
5472 // inside second function f2();
5473 }
5474 fn c() {
5475 f3();
5476 }
5477 "
5478 .unindent()
5479 );
5480 });
5481
5482 let edits = project
5483 .update(cx, |project, cx| {
5484 project.edits_from_lsp(
5485 &buffer,
5486 vec![
5487 // replace body of first function
5488 lsp::TextEdit {
5489 range: lsp::Range::new(
5490 lsp::Position::new(0, 0),
5491 lsp::Position::new(3, 0),
5492 ),
5493 new_text: "
5494 fn a() {
5495 f10();
5496 }
5497 "
5498 .unindent(),
5499 },
5500 // edit inside second function
5501 lsp::TextEdit {
5502 range: lsp::Range::new(
5503 lsp::Position::new(4, 6),
5504 lsp::Position::new(4, 6),
5505 ),
5506 new_text: "00".into(),
5507 },
5508 // edit inside third function via two distinct edits
5509 lsp::TextEdit {
5510 range: lsp::Range::new(
5511 lsp::Position::new(7, 5),
5512 lsp::Position::new(7, 5),
5513 ),
5514 new_text: "4000".into(),
5515 },
5516 lsp::TextEdit {
5517 range: lsp::Range::new(
5518 lsp::Position::new(7, 5),
5519 lsp::Position::new(7, 6),
5520 ),
5521 new_text: "".into(),
5522 },
5523 ],
5524 Some(lsp_document_version),
5525 cx,
5526 )
5527 })
5528 .await
5529 .unwrap();
5530
5531 buffer.update(cx, |buffer, cx| {
5532 for (range, new_text) in edits {
5533 buffer.edit([range], new_text, cx);
5534 }
5535 assert_eq!(
5536 buffer.text(),
5537 "
5538 // above first function
5539 fn a() {
5540 // inside first function
5541 f10();
5542 }
5543 fn b() {
5544 // inside second function f200();
5545 }
5546 fn c() {
5547 f4000();
5548 }
5549 "
5550 .unindent()
5551 );
5552 });
5553 }
5554
5555 #[gpui::test]
5556 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5557 cx.foreground().forbid_parking();
5558
5559 let text = "
5560 use a::b;
5561 use a::c;
5562
5563 fn f() {
5564 b();
5565 c();
5566 }
5567 "
5568 .unindent();
5569
5570 let fs = FakeFs::new(cx.background());
5571 fs.insert_tree(
5572 "/dir",
5573 json!({
5574 "a.rs": text.clone(),
5575 }),
5576 )
5577 .await;
5578
5579 let project = Project::test(fs, cx);
5580 let worktree_id = project
5581 .update(cx, |project, cx| {
5582 project.find_or_create_local_worktree("/dir", true, cx)
5583 })
5584 .await
5585 .unwrap()
5586 .0
5587 .read_with(cx, |tree, _| tree.id());
5588
5589 let buffer = project
5590 .update(cx, |project, cx| {
5591 project.open_buffer((worktree_id, "a.rs"), cx)
5592 })
5593 .await
5594 .unwrap();
5595
5596 // Simulate the language server sending us a small edit in the form of a very large diff.
5597 // Rust-analyzer does this when performing a merge-imports code action.
5598 let edits = project
5599 .update(cx, |project, cx| {
5600 project.edits_from_lsp(
5601 &buffer,
5602 [
5603 // Replace the first use statement without editing the semicolon.
5604 lsp::TextEdit {
5605 range: lsp::Range::new(
5606 lsp::Position::new(0, 4),
5607 lsp::Position::new(0, 8),
5608 ),
5609 new_text: "a::{b, c}".into(),
5610 },
5611 // Reinsert the remainder of the file between the semicolon and the final
5612 // newline of the file.
5613 lsp::TextEdit {
5614 range: lsp::Range::new(
5615 lsp::Position::new(0, 9),
5616 lsp::Position::new(0, 9),
5617 ),
5618 new_text: "\n\n".into(),
5619 },
5620 lsp::TextEdit {
5621 range: lsp::Range::new(
5622 lsp::Position::new(0, 9),
5623 lsp::Position::new(0, 9),
5624 ),
5625 new_text: "
5626 fn f() {
5627 b();
5628 c();
5629 }"
5630 .unindent(),
5631 },
5632 // Delete everything after the first newline of the file.
5633 lsp::TextEdit {
5634 range: lsp::Range::new(
5635 lsp::Position::new(1, 0),
5636 lsp::Position::new(7, 0),
5637 ),
5638 new_text: "".into(),
5639 },
5640 ],
5641 None,
5642 cx,
5643 )
5644 })
5645 .await
5646 .unwrap();
5647
5648 buffer.update(cx, |buffer, cx| {
5649 let edits = edits
5650 .into_iter()
5651 .map(|(range, text)| {
5652 (
5653 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5654 text,
5655 )
5656 })
5657 .collect::<Vec<_>>();
5658
5659 assert_eq!(
5660 edits,
5661 [
5662 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5663 (Point::new(1, 0)..Point::new(2, 0), "".into())
5664 ]
5665 );
5666
5667 for (range, new_text) in edits {
5668 buffer.edit([range], new_text, cx);
5669 }
5670 assert_eq!(
5671 buffer.text(),
5672 "
5673 use a::{b, c};
5674
5675 fn f() {
5676 b();
5677 c();
5678 }
5679 "
5680 .unindent()
5681 );
5682 });
5683 }
5684
5685 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5686 buffer: &Buffer,
5687 range: Range<T>,
5688 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5689 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5690 for chunk in buffer.snapshot().chunks(range, true) {
5691 if chunks.last().map_or(false, |prev_chunk| {
5692 prev_chunk.1 == chunk.diagnostic_severity
5693 }) {
5694 chunks.last_mut().unwrap().0.push_str(chunk.text);
5695 } else {
5696 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5697 }
5698 }
5699 chunks
5700 }
5701
5702 #[gpui::test]
5703 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5704 let dir = temp_tree(json!({
5705 "root": {
5706 "dir1": {},
5707 "dir2": {
5708 "dir3": {}
5709 }
5710 }
5711 }));
5712
5713 let project = Project::test(Arc::new(RealFs), cx);
5714 let (tree, _) = project
5715 .update(cx, |project, cx| {
5716 project.find_or_create_local_worktree(&dir.path(), true, cx)
5717 })
5718 .await
5719 .unwrap();
5720
5721 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5722 .await;
5723
5724 let cancel_flag = Default::default();
5725 let results = project
5726 .read_with(cx, |project, cx| {
5727 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5728 })
5729 .await;
5730
5731 assert!(results.is_empty());
5732 }
5733
5734 #[gpui::test]
5735 async fn test_definition(cx: &mut gpui::TestAppContext) {
5736 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5737 let language = Arc::new(Language::new(
5738 LanguageConfig {
5739 name: "Rust".into(),
5740 path_suffixes: vec!["rs".to_string()],
5741 language_server: language_server_config,
5742 ..Default::default()
5743 },
5744 Some(tree_sitter_rust::language()),
5745 ));
5746
5747 let fs = FakeFs::new(cx.background());
5748 fs.insert_tree(
5749 "/dir",
5750 json!({
5751 "a.rs": "const fn a() { A }",
5752 "b.rs": "const y: i32 = crate::a()",
5753 }),
5754 )
5755 .await;
5756
5757 let project = Project::test(fs, cx);
5758 project.update(cx, |project, _| {
5759 Arc::get_mut(&mut project.languages).unwrap().add(language);
5760 });
5761
5762 let (tree, _) = project
5763 .update(cx, |project, cx| {
5764 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5765 })
5766 .await
5767 .unwrap();
5768 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5769 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5770 .await;
5771
5772 let buffer = project
5773 .update(cx, |project, cx| {
5774 project.open_buffer(
5775 ProjectPath {
5776 worktree_id,
5777 path: Path::new("").into(),
5778 },
5779 cx,
5780 )
5781 })
5782 .await
5783 .unwrap();
5784
5785 let mut fake_server = fake_servers.next().await.unwrap();
5786 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
5787 let params = params.text_document_position_params;
5788 assert_eq!(
5789 params.text_document.uri.to_file_path().unwrap(),
5790 Path::new("/dir/b.rs"),
5791 );
5792 assert_eq!(params.position, lsp::Position::new(0, 22));
5793
5794 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5795 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5796 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5797 )))
5798 });
5799
5800 let mut definitions = project
5801 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5802 .await
5803 .unwrap();
5804
5805 assert_eq!(definitions.len(), 1);
5806 let definition = definitions.pop().unwrap();
5807 cx.update(|cx| {
5808 let target_buffer = definition.buffer.read(cx);
5809 assert_eq!(
5810 target_buffer
5811 .file()
5812 .unwrap()
5813 .as_local()
5814 .unwrap()
5815 .abs_path(cx),
5816 Path::new("/dir/a.rs"),
5817 );
5818 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
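            // Opening the definition pulled "/dir/a.rs" into the project as an invisible worktree.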
5819 assert_eq!(
5820 list_worktrees(&project, cx),
5821 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5822 );
5823
5824 drop(definition);
5825 });
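        // Dropping the only handle to the definition's target buffer releases the extra worktree as well.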
5826 cx.read(|cx| {
5827 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5828 });
5829
5830 fn list_worktrees<'a>(
5831 project: &'a ModelHandle<Project>,
5832 cx: &'a AppContext,
5833 ) -> Vec<(&'a Path, bool)> {
5834 project
5835 .read(cx)
5836 .worktrees(cx)
5837 .map(|worktree| {
5838 let worktree = worktree.read(cx);
5839 (
5840 worktree.as_local().unwrap().abs_path().as_ref(),
5841 worktree.is_visible(),
5842 )
5843 })
5844 .collect::<Vec<_>>()
5845 }
5846 }
5847
5848 #[gpui::test]
5849 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5850 let fs = FakeFs::new(cx.background());
5851 fs.insert_tree(
5852 "/dir",
5853 json!({
5854 "file1": "the old contents",
5855 }),
5856 )
5857 .await;
5858
5859 let project = Project::test(fs.clone(), cx);
5860 let worktree_id = project
5861 .update(cx, |p, cx| {
5862 p.find_or_create_local_worktree("/dir", true, cx)
5863 })
5864 .await
5865 .unwrap()
5866 .0
5867 .read_with(cx, |tree, _| tree.id());
5868
5869 let buffer = project
5870 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5871 .await
5872 .unwrap();
5873 buffer
5874 .update(cx, |buffer, cx| {
5875 assert_eq!(buffer.text(), "the old contents");
5876 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5877 buffer.save(cx)
5878 })
5879 .await
5880 .unwrap();
5881
5882 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5883 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5884 }
5885
5886 #[gpui::test]
5887 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5888 let fs = FakeFs::new(cx.background());
5889 fs.insert_tree(
5890 "/dir",
5891 json!({
5892 "file1": "the old contents",
5893 }),
5894 )
5895 .await;
5896
5897 let project = Project::test(fs.clone(), cx);
5898 let worktree_id = project
5899 .update(cx, |p, cx| {
5900 p.find_or_create_local_worktree("/dir/file1", true, cx)
5901 })
5902 .await
5903 .unwrap()
5904 .0
5905 .read_with(cx, |tree, _| tree.id());
5906
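        // In a single-file worktree, the file itself is the worktree root, so the buffer is opened via an empty relative path.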
5907 let buffer = project
5908 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5909 .await
5910 .unwrap();
5911 buffer
5912 .update(cx, |buffer, cx| {
5913 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5914 buffer.save(cx)
5915 })
5916 .await
5917 .unwrap();
5918
5919 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5920 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5921 }
5922
5923 #[gpui::test]
5924 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5925 let fs = FakeFs::new(cx.background());
5926 fs.insert_tree("/dir", json!({})).await;
5927
5928 let project = Project::test(fs.clone(), cx);
5929 let (worktree, _) = project
5930 .update(cx, |project, cx| {
5931 project.find_or_create_local_worktree("/dir", true, cx)
5932 })
5933 .await
5934 .unwrap();
5935 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5936
5937 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5938 buffer.update(cx, |buffer, cx| {
5939 buffer.edit([0..0], "abc", cx);
5940 assert!(buffer.is_dirty());
5941 assert!(!buffer.has_conflict());
5942 });
5943 project
5944 .update(cx, |project, cx| {
5945 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5946 })
5947 .await
5948 .unwrap();
5949 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5950 buffer.read_with(cx, |buffer, cx| {
5951 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5952 assert!(!buffer.is_dirty());
5953 assert!(!buffer.has_conflict());
5954 });
5955
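        // Opening the newly saved path should return the same buffer that was just saved, not a fresh one.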
5956 let opened_buffer = project
5957 .update(cx, |project, cx| {
5958 project.open_buffer((worktree_id, "file1"), cx)
5959 })
5960 .await
5961 .unwrap();
5962 assert_eq!(opened_buffer, buffer);
5963 }
5964
5965 #[gpui::test(retries = 5)]
5966 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5967 let dir = temp_tree(json!({
5968 "a": {
5969 "file1": "",
5970 "file2": "",
5971 "file3": "",
5972 },
5973 "b": {
5974 "c": {
5975 "file4": "",
5976 "file5": "",
5977 }
5978 }
5979 }));
5980
5981 let project = Project::test(Arc::new(RealFs), cx);
5982 let rpc = project.read_with(cx, |p, _| p.client.clone());
5983
5984 let (tree, _) = project
5985 .update(cx, |p, cx| {
5986 p.find_or_create_local_worktree(dir.path(), true, cx)
5987 })
5988 .await
5989 .unwrap();
5990 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5991
5992 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5993 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5994 async move { buffer.await.unwrap() }
5995 };
5996 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5997 tree.read_with(cx, |tree, _| {
5998 tree.entry_for_path(path)
5999 .unwrap_or_else(|| panic!("no entry for path {}", path))
6000 .id
6001 })
6002 };
6003
6004 let buffer2 = buffer_for_path("a/file2", cx).await;
6005 let buffer3 = buffer_for_path("a/file3", cx).await;
6006 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6007 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6008
6009 let file2_id = id_for_path("a/file2", &cx);
6010 let file3_id = id_for_path("a/file3", &cx);
6011 let file4_id = id_for_path("b/c/file4", &cx);
6012
6013 // Wait for the initial scan.
6014 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6015 .await;
6016
6017 // Create a remote copy of this worktree.
6018 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6019 let (remote, load_task) = cx.update(|cx| {
6020 Worktree::remote(
6021 1,
6022 1,
6023 initial_snapshot.to_proto(&Default::default(), true),
6024 rpc.clone(),
6025 cx,
6026 )
6027 });
6028 load_task.await;
6029
6030 cx.read(|cx| {
6031 assert!(!buffer2.read(cx).is_dirty());
6032 assert!(!buffer3.read(cx).is_dirty());
6033 assert!(!buffer4.read(cx).is_dirty());
6034 assert!(!buffer5.read(cx).is_dirty());
6035 });
6036
6037 // Rename and delete files and directories.
6038 tree.flush_fs_events(&cx).await;
6039 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6040 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6041 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6042 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6043 tree.flush_fs_events(&cx).await;
6044
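        // After the FS events are processed, the worktree reflects the renames and deletions performed above.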
6045 let expected_paths = vec![
6046 "a",
6047 "a/file1",
6048 "a/file2.new",
6049 "b",
6050 "d",
6051 "d/file3",
6052 "d/file4",
6053 ];
6054
6055 cx.read(|app| {
6056 assert_eq!(
6057 tree.read(app)
6058 .paths()
6059 .map(|p| p.to_str().unwrap())
6060 .collect::<Vec<_>>(),
6061 expected_paths
6062 );
6063
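            // Entry ids are preserved across renames.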
6064 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6065 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6066 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6067
6068 assert_eq!(
6069 buffer2.read(app).file().unwrap().path().as_ref(),
6070 Path::new("a/file2.new")
6071 );
6072 assert_eq!(
6073 buffer3.read(app).file().unwrap().path().as_ref(),
6074 Path::new("d/file3")
6075 );
6076 assert_eq!(
6077 buffer4.read(app).file().unwrap().path().as_ref(),
6078 Path::new("d/file4")
6079 );
6080 assert_eq!(
6081 buffer5.read(app).file().unwrap().path().as_ref(),
6082 Path::new("b/c/file5")
6083 );
6084
6085 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6086 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6087 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6088 assert!(buffer5.read(app).file().unwrap().is_deleted());
6089 });
6090
6091 // Update the remote worktree. Check that it becomes consistent with the
6092 // local worktree.
6093 remote.update(cx, |remote, cx| {
6094 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6095 &initial_snapshot,
6096 1,
6097 1,
6098 true,
6099 );
6100 remote
6101 .as_remote_mut()
6102 .unwrap()
6103 .snapshot
6104 .apply_remote_update(update_message)
6105 .unwrap();
6106
6107 assert_eq!(
6108 remote
6109 .paths()
6110 .map(|p| p.to_str().unwrap())
6111 .collect::<Vec<_>>(),
6112 expected_paths
6113 );
6114 });
6115 }
6116
6117 #[gpui::test]
6118 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6119 let fs = FakeFs::new(cx.background());
6120 fs.insert_tree(
6121 "/the-dir",
6122 json!({
6123 "a.txt": "a-contents",
6124 "b.txt": "b-contents",
6125 }),
6126 )
6127 .await;
6128
6129 let project = Project::test(fs.clone(), cx);
6130 let worktree_id = project
6131 .update(cx, |p, cx| {
6132 p.find_or_create_local_worktree("/the-dir", true, cx)
6133 })
6134 .await
6135 .unwrap()
6136 .0
6137 .read_with(cx, |tree, _| tree.id());
6138
6139 // Spawn multiple tasks to open paths, repeating some paths.
6140 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6141 (
6142 p.open_buffer((worktree_id, "a.txt"), cx),
6143 p.open_buffer((worktree_id, "b.txt"), cx),
6144 p.open_buffer((worktree_id, "a.txt"), cx),
6145 )
6146 });
6147
6148 let buffer_a_1 = buffer_a_1.await.unwrap();
6149 let buffer_a_2 = buffer_a_2.await.unwrap();
6150 let buffer_b = buffer_b.await.unwrap();
6151 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6152 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6153
6154 // There is only one buffer per path.
6155 let buffer_a_id = buffer_a_1.id();
6156 assert_eq!(buffer_a_2.id(), buffer_a_id);
6157
6158 // Open the same path again while it is still open.
6159 drop(buffer_a_1);
6160 let buffer_a_3 = project
6161 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6162 .await
6163 .unwrap();
6164
6165 // There's still only one buffer per path.
6166 assert_eq!(buffer_a_3.id(), buffer_a_id);
6167 }
6168
6169 #[gpui::test]
6170 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6171 use std::fs;
6172
6173 let dir = temp_tree(json!({
6174 "file1": "abc",
6175 "file2": "def",
6176 "file3": "ghi",
6177 }));
6178
6179 let project = Project::test(Arc::new(RealFs), cx);
6180 let (worktree, _) = project
6181 .update(cx, |p, cx| {
6182 p.find_or_create_local_worktree(dir.path(), true, cx)
6183 })
6184 .await
6185 .unwrap();
6186 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6187
6188 worktree.flush_fs_events(&cx).await;
6189 worktree
6190 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6191 .await;
6192
6193 let buffer1 = project
6194 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6195 .await
6196 .unwrap();
6197 let events = Rc::new(RefCell::new(Vec::new()));
6198
6199 // initially, the buffer isn't dirty.
6200 buffer1.update(cx, |buffer, cx| {
6201 cx.subscribe(&buffer1, {
6202 let events = events.clone();
6203 move |_, _, event, _| match event {
6204 BufferEvent::Operation(_) => {}
6205 _ => events.borrow_mut().push(event.clone()),
6206 }
6207 })
6208 .detach();
6209
6210 assert!(!buffer.is_dirty());
6211 assert!(events.borrow().is_empty());
6212
6213 buffer.edit(vec![1..2], "", cx);
6214 });
6215
6216 // after the first edit, the buffer is dirty, and emits a dirtied event.
6217 buffer1.update(cx, |buffer, cx| {
6218 assert!(buffer.text() == "ac");
6219 assert!(buffer.is_dirty());
6220 assert_eq!(
6221 *events.borrow(),
6222 &[language::Event::Edited, language::Event::Dirtied]
6223 );
6224 events.borrow_mut().clear();
6225 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6226 });
6227
6228 // after saving, the buffer is not dirty, and emits a saved event.
6229 buffer1.update(cx, |buffer, cx| {
6230 assert!(!buffer.is_dirty());
6231 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6232 events.borrow_mut().clear();
6233
6234 buffer.edit(vec![1..1], "B", cx);
6235 buffer.edit(vec![2..2], "D", cx);
6236 });
6237
6238 // after editing again, the buffer is dirty and emits another dirtied event.
6239 buffer1.update(cx, |buffer, cx| {
6240 assert!(buffer.text() == "aBDc");
6241 assert!(buffer.is_dirty());
6242 assert_eq!(
6243 *events.borrow(),
6244 &[
6245 language::Event::Edited,
6246 language::Event::Dirtied,
6247 language::Event::Edited,
6248 ],
6249 );
6250 events.borrow_mut().clear();
6251
6252 // TODO - currently, after restoring the buffer to its
6253 // previously-saved state, the buffer is still considered dirty.
6254 buffer.edit([1..3], "", cx);
6255 assert!(buffer.text() == "ac");
6256 assert!(buffer.is_dirty());
6257 });
6258
6259 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6260
6261 // When a file is deleted, the buffer is considered dirty.
6262 let events = Rc::new(RefCell::new(Vec::new()));
6263 let buffer2 = project
6264 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6265 .await
6266 .unwrap();
6267 buffer2.update(cx, |_, cx| {
6268 cx.subscribe(&buffer2, {
6269 let events = events.clone();
6270 move |_, _, event, _| events.borrow_mut().push(event.clone())
6271 })
6272 .detach();
6273 });
6274
6275 fs::remove_file(dir.path().join("file2")).unwrap();
6276 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6277 assert_eq!(
6278 *events.borrow(),
6279 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6280 );
6281
6282 // When a file that is already dirty gets deleted, we don't emit a Dirtied event.
6283 let events = Rc::new(RefCell::new(Vec::new()));
6284 let buffer3 = project
6285 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6286 .await
6287 .unwrap();
6288 buffer3.update(cx, |_, cx| {
6289 cx.subscribe(&buffer3, {
6290 let events = events.clone();
6291 move |_, _, event, _| events.borrow_mut().push(event.clone())
6292 })
6293 .detach();
6294 });
6295
6296 worktree.flush_fs_events(&cx).await;
6297 buffer3.update(cx, |buffer, cx| {
6298 buffer.edit(Some(0..0), "x", cx);
6299 });
6300 events.borrow_mut().clear();
6301 fs::remove_file(dir.path().join("file3")).unwrap();
6302 buffer3
6303 .condition(&cx, |_, _| !events.borrow().is_empty())
6304 .await;
6305 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6306 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6307 }
6308
6309 #[gpui::test]
6310 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6311 use std::fs;
6312
6313 let initial_contents = "aaa\nbbbbb\nc\n";
6314 let dir = temp_tree(json!({ "the-file": initial_contents }));
6315
6316 let project = Project::test(Arc::new(RealFs), cx);
6317 let (worktree, _) = project
6318 .update(cx, |p, cx| {
6319 p.find_or_create_local_worktree(dir.path(), true, cx)
6320 })
6321 .await
6322 .unwrap();
6323 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6324
6325 worktree
6326 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6327 .await;
6328
6329 let abs_path = dir.path().join("the-file");
6330 let buffer = project
6331 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6332 .await
6333 .unwrap();
6334
6335 // TODO
6336 // Add a cursor on each row.
6337 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6338 // assert!(!buffer.is_dirty());
6339 // buffer.add_selection_set(
6340 // &(0..3)
6341 // .map(|row| Selection {
6342 // id: row as usize,
6343 // start: Point::new(row, 1),
6344 // end: Point::new(row, 1),
6345 // reversed: false,
6346 // goal: SelectionGoal::None,
6347 // })
6348 // .collect::<Vec<_>>(),
6349 // cx,
6350 // )
6351 // });
6352
6353 // Change the file on disk, adding two new lines of text, and removing
6354 // one line.
6355 buffer.read_with(cx, |buffer, _| {
6356 assert!(!buffer.is_dirty());
6357 assert!(!buffer.has_conflict());
6358 });
6359 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6360 fs::write(&abs_path, new_contents).unwrap();
6361
6362 // Because the buffer was not modified, it is reloaded from disk. Its
6363 // contents are edited according to the diff between the old and new
6364 // file contents.
6365 buffer
6366 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6367 .await;
6368
6369 buffer.update(cx, |buffer, _| {
6370 assert_eq!(buffer.text(), new_contents);
6371 assert!(!buffer.is_dirty());
6372 assert!(!buffer.has_conflict());
6373
6374 // TODO
6375 // let cursor_positions = buffer
6376 // .selection_set(selection_set_id)
6377 // .unwrap()
6378 // .selections::<Point>(&*buffer)
6379 // .map(|selection| {
6380 // assert_eq!(selection.start, selection.end);
6381 // selection.start
6382 // })
6383 // .collect::<Vec<_>>();
6384 // assert_eq!(
6385 // cursor_positions,
6386 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6387 // );
6388 });
6389
6390 // Modify the buffer
6391 buffer.update(cx, |buffer, cx| {
6392 buffer.edit(vec![0..0], " ", cx);
6393 assert!(buffer.is_dirty());
6394 assert!(!buffer.has_conflict());
6395 });
6396
6397 // Change the file on disk again, adding blank lines to the beginning.
6398 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6399
6400 // Because the buffer is modified, it doesn't reload from disk, but is
6401 // marked as having a conflict.
6402 buffer
6403 .condition(&cx, |buffer, _| buffer.has_conflict())
6404 .await;
6405 }
6406
6407 #[gpui::test]
6408 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6409 cx.foreground().forbid_parking();
6410
6411 let fs = FakeFs::new(cx.background());
6412 fs.insert_tree(
6413 "/the-dir",
6414 json!({
6415 "a.rs": "
6416 fn foo(mut v: Vec<usize>) {
6417 for x in &v {
6418 v.push(1);
6419 }
6420 }
6421 "
6422 .unindent(),
6423 }),
6424 )
6425 .await;
6426
6427 let project = Project::test(fs.clone(), cx);
6428 let (worktree, _) = project
6429 .update(cx, |p, cx| {
6430 p.find_or_create_local_worktree("/the-dir", true, cx)
6431 })
6432 .await
6433 .unwrap();
6434 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6435
6436 let buffer = project
6437 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6438 .await
6439 .unwrap();
6440
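        // Publish diagnostics in which hint entries reference their primary errors (and vice versa) via relatedInformation, so they can be grouped.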
6441 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6442 let message = lsp::PublishDiagnosticsParams {
6443 uri: buffer_uri.clone(),
6444 diagnostics: vec![
6445 lsp::Diagnostic {
6446 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6447 severity: Some(DiagnosticSeverity::WARNING),
6448 message: "error 1".to_string(),
6449 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6450 location: lsp::Location {
6451 uri: buffer_uri.clone(),
6452 range: lsp::Range::new(
6453 lsp::Position::new(1, 8),
6454 lsp::Position::new(1, 9),
6455 ),
6456 },
6457 message: "error 1 hint 1".to_string(),
6458 }]),
6459 ..Default::default()
6460 },
6461 lsp::Diagnostic {
6462 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6463 severity: Some(DiagnosticSeverity::HINT),
6464 message: "error 1 hint 1".to_string(),
6465 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6466 location: lsp::Location {
6467 uri: buffer_uri.clone(),
6468 range: lsp::Range::new(
6469 lsp::Position::new(1, 8),
6470 lsp::Position::new(1, 9),
6471 ),
6472 },
6473 message: "original diagnostic".to_string(),
6474 }]),
6475 ..Default::default()
6476 },
6477 lsp::Diagnostic {
6478 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6479 severity: Some(DiagnosticSeverity::ERROR),
6480 message: "error 2".to_string(),
6481 related_information: Some(vec![
6482 lsp::DiagnosticRelatedInformation {
6483 location: lsp::Location {
6484 uri: buffer_uri.clone(),
6485 range: lsp::Range::new(
6486 lsp::Position::new(1, 13),
6487 lsp::Position::new(1, 15),
6488 ),
6489 },
6490 message: "error 2 hint 1".to_string(),
6491 },
6492 lsp::DiagnosticRelatedInformation {
6493 location: lsp::Location {
6494 uri: buffer_uri.clone(),
6495 range: lsp::Range::new(
6496 lsp::Position::new(1, 13),
6497 lsp::Position::new(1, 15),
6498 ),
6499 },
6500 message: "error 2 hint 2".to_string(),
6501 },
6502 ]),
6503 ..Default::default()
6504 },
6505 lsp::Diagnostic {
6506 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6507 severity: Some(DiagnosticSeverity::HINT),
6508 message: "error 2 hint 1".to_string(),
6509 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6510 location: lsp::Location {
6511 uri: buffer_uri.clone(),
6512 range: lsp::Range::new(
6513 lsp::Position::new(2, 8),
6514 lsp::Position::new(2, 17),
6515 ),
6516 },
6517 message: "original diagnostic".to_string(),
6518 }]),
6519 ..Default::default()
6520 },
6521 lsp::Diagnostic {
6522 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6523 severity: Some(DiagnosticSeverity::HINT),
6524 message: "error 2 hint 2".to_string(),
6525 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6526 location: lsp::Location {
6527 uri: buffer_uri.clone(),
6528 range: lsp::Range::new(
6529 lsp::Position::new(2, 8),
6530 lsp::Position::new(2, 17),
6531 ),
6532 },
6533 message: "original diagnostic".to_string(),
6534 }]),
6535 ..Default::default()
6536 },
6537 ],
6538 version: None,
6539 };
6540
6541 project
6542 .update(cx, |p, cx| {
6543 p.update_diagnostics(message, &Default::default(), cx)
6544 })
6545 .unwrap();
6546 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6547
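        // Diagnostics are returned in buffer order, and each hint shares a group_id with its primary diagnostic.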
6548 assert_eq!(
6549 buffer
6550 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6551 .collect::<Vec<_>>(),
6552 &[
6553 DiagnosticEntry {
6554 range: Point::new(1, 8)..Point::new(1, 9),
6555 diagnostic: Diagnostic {
6556 severity: DiagnosticSeverity::WARNING,
6557 message: "error 1".to_string(),
6558 group_id: 0,
6559 is_primary: true,
6560 ..Default::default()
6561 }
6562 },
6563 DiagnosticEntry {
6564 range: Point::new(1, 8)..Point::new(1, 9),
6565 diagnostic: Diagnostic {
6566 severity: DiagnosticSeverity::HINT,
6567 message: "error 1 hint 1".to_string(),
6568 group_id: 0,
6569 is_primary: false,
6570 ..Default::default()
6571 }
6572 },
6573 DiagnosticEntry {
6574 range: Point::new(1, 13)..Point::new(1, 15),
6575 diagnostic: Diagnostic {
6576 severity: DiagnosticSeverity::HINT,
6577 message: "error 2 hint 1".to_string(),
6578 group_id: 1,
6579 is_primary: false,
6580 ..Default::default()
6581 }
6582 },
6583 DiagnosticEntry {
6584 range: Point::new(1, 13)..Point::new(1, 15),
6585 diagnostic: Diagnostic {
6586 severity: DiagnosticSeverity::HINT,
6587 message: "error 2 hint 2".to_string(),
6588 group_id: 1,
6589 is_primary: false,
6590 ..Default::default()
6591 }
6592 },
6593 DiagnosticEntry {
6594 range: Point::new(2, 8)..Point::new(2, 17),
6595 diagnostic: Diagnostic {
6596 severity: DiagnosticSeverity::ERROR,
6597 message: "error 2".to_string(),
6598 group_id: 1,
6599 is_primary: true,
6600 ..Default::default()
6601 }
6602 }
6603 ]
6604 );
6605
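        // Each group can also be queried on its own by group id: the primary entry plus its related hints.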
6606 assert_eq!(
6607 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6608 &[
6609 DiagnosticEntry {
6610 range: Point::new(1, 8)..Point::new(1, 9),
6611 diagnostic: Diagnostic {
6612 severity: DiagnosticSeverity::WARNING,
6613 message: "error 1".to_string(),
6614 group_id: 0,
6615 is_primary: true,
6616 ..Default::default()
6617 }
6618 },
6619 DiagnosticEntry {
6620 range: Point::new(1, 8)..Point::new(1, 9),
6621 diagnostic: Diagnostic {
6622 severity: DiagnosticSeverity::HINT,
6623 message: "error 1 hint 1".to_string(),
6624 group_id: 0,
6625 is_primary: false,
6626 ..Default::default()
6627 }
6628 },
6629 ]
6630 );
6631 assert_eq!(
6632 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6633 &[
6634 DiagnosticEntry {
6635 range: Point::new(1, 13)..Point::new(1, 15),
6636 diagnostic: Diagnostic {
6637 severity: DiagnosticSeverity::HINT,
6638 message: "error 2 hint 1".to_string(),
6639 group_id: 1,
6640 is_primary: false,
6641 ..Default::default()
6642 }
6643 },
6644 DiagnosticEntry {
6645 range: Point::new(1, 13)..Point::new(1, 15),
6646 diagnostic: Diagnostic {
6647 severity: DiagnosticSeverity::HINT,
6648 message: "error 2 hint 2".to_string(),
6649 group_id: 1,
6650 is_primary: false,
6651 ..Default::default()
6652 }
6653 },
6654 DiagnosticEntry {
6655 range: Point::new(2, 8)..Point::new(2, 17),
6656 diagnostic: Diagnostic {
6657 severity: DiagnosticSeverity::ERROR,
6658 message: "error 2".to_string(),
6659 group_id: 1,
6660 is_primary: true,
6661 ..Default::default()
6662 }
6663 }
6664 ]
6665 );
6666 }
6667
6668 #[gpui::test]
6669 async fn test_rename(cx: &mut gpui::TestAppContext) {
6670 cx.foreground().forbid_parking();
6671
6672 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6673 let language = Arc::new(Language::new(
6674 LanguageConfig {
6675 name: "Rust".into(),
6676 path_suffixes: vec!["rs".to_string()],
6677 language_server: language_server_config,
6678 ..Default::default()
6679 },
6680 Some(tree_sitter_rust::language()),
6681 ));
6682
6683 let fs = FakeFs::new(cx.background());
6684 fs.insert_tree(
6685 "/dir",
6686 json!({
6687 "one.rs": "const ONE: usize = 1;",
6688 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6689 }),
6690 )
6691 .await;
6692
6693 let project = Project::test(fs.clone(), cx);
6694 project.update(cx, |project, _| {
6695 Arc::get_mut(&mut project.languages).unwrap().add(language);
6696 });
6697
6698 let (tree, _) = project
6699 .update(cx, |project, cx| {
6700 project.find_or_create_local_worktree("/dir", true, cx)
6701 })
6702 .await
6703 .unwrap();
6704 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6705 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6706 .await;
6707
6708 let buffer = project
6709 .update(cx, |project, cx| {
6710 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6711 })
6712 .await
6713 .unwrap();
6714
6715 let mut fake_server = fake_servers.next().await.unwrap();
6716
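        // prepare_rename should surface the range of the symbol under the cursor, as reported by the language server.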
6717 let response = project.update(cx, |project, cx| {
6718 project.prepare_rename(buffer.clone(), 7, cx)
6719 });
6720 fake_server
6721 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6722 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6723 assert_eq!(params.position, lsp::Position::new(0, 7));
6724 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6725 lsp::Position::new(0, 6),
6726 lsp::Position::new(0, 9),
6727 )))
6728 })
6729 .next()
6730 .await
6731 .unwrap();
6732 let range = response.await.unwrap().unwrap();
6733 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6734 assert_eq!(range, 6..9);
6735
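        // perform_rename applies the server's WorkspaceEdit to both files and returns the edited buffers in a project transaction.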
6736 let response = project.update(cx, |project, cx| {
6737 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6738 });
6739 fake_server
6740 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
6741 assert_eq!(
6742 params.text_document_position.text_document.uri.as_str(),
6743 "file:///dir/one.rs"
6744 );
6745 assert_eq!(
6746 params.text_document_position.position,
6747 lsp::Position::new(0, 7)
6748 );
6749 assert_eq!(params.new_name, "THREE");
6750 Some(lsp::WorkspaceEdit {
6751 changes: Some(
6752 [
6753 (
6754 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6755 vec![lsp::TextEdit::new(
6756 lsp::Range::new(
6757 lsp::Position::new(0, 6),
6758 lsp::Position::new(0, 9),
6759 ),
6760 "THREE".to_string(),
6761 )],
6762 ),
6763 (
6764 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6765 vec![
6766 lsp::TextEdit::new(
6767 lsp::Range::new(
6768 lsp::Position::new(0, 24),
6769 lsp::Position::new(0, 27),
6770 ),
6771 "THREE".to_string(),
6772 ),
6773 lsp::TextEdit::new(
6774 lsp::Range::new(
6775 lsp::Position::new(0, 35),
6776 lsp::Position::new(0, 38),
6777 ),
6778 "THREE".to_string(),
6779 ),
6780 ],
6781 ),
6782 ]
6783 .into_iter()
6784 .collect(),
6785 ),
6786 ..Default::default()
6787 })
6788 })
6789 .next()
6790 .await
6791 .unwrap();
6792 let mut transaction = response.await.unwrap().0;
6793 assert_eq!(transaction.len(), 2);
6794 assert_eq!(
6795 transaction
6796 .remove_entry(&buffer)
6797 .unwrap()
6798 .0
6799 .read_with(cx, |buffer, _| buffer.text()),
6800 "const THREE: usize = 1;"
6801 );
6802 assert_eq!(
6803 transaction
6804 .into_keys()
6805 .next()
6806 .unwrap()
6807 .read_with(cx, |buffer, _| buffer.text()),
6808 "const TWO: usize = one::THREE + one::THREE;"
6809 );
6810 }
6811
6812 #[gpui::test]
6813 async fn test_search(cx: &mut gpui::TestAppContext) {
6814 let fs = FakeFs::new(cx.background());
6815 fs.insert_tree(
6816 "/dir",
6817 json!({
6818 "one.rs": "const ONE: usize = 1;",
6819 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6820 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6821 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6822 }),
6823 )
6824 .await;
6825 let project = Project::test(fs.clone(), cx);
6826 let (tree, _) = project
6827 .update(cx, |project, cx| {
6828 project.find_or_create_local_worktree("/dir", true, cx)
6829 })
6830 .await
6831 .unwrap();
6832 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6833 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6834 .await;
6835
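        // Before any buffers are edited, the search results reflect the files' on-disk contents.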
6836 assert_eq!(
6837 search(&project, SearchQuery::text("TWO", false, true), cx)
6838 .await
6839 .unwrap(),
6840 HashMap::from_iter([
6841 ("two.rs".to_string(), vec![6..9]),
6842 ("three.rs".to_string(), vec![37..40])
6843 ])
6844 );
6845
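        // Edit an open buffer so that it contains new matches; the search should reflect the unsaved, in-memory contents.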
6846 let buffer_4 = project
6847 .update(cx, |project, cx| {
6848 project.open_buffer((worktree_id, "four.rs"), cx)
6849 })
6850 .await
6851 .unwrap();
6852 buffer_4.update(cx, |buffer, cx| {
6853 buffer.edit([20..28, 31..43], "two::TWO", cx);
6854 });
6855
6856 assert_eq!(
6857 search(&project, SearchQuery::text("TWO", false, true), cx)
6858 .await
6859 .unwrap(),
6860 HashMap::from_iter([
6861 ("two.rs".to_string(), vec![6..9]),
6862 ("three.rs".to_string(), vec![37..40]),
6863 ("four.rs".to_string(), vec![25..28, 36..39])
6864 ])
6865 );
6866
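        // Helper that runs a project-wide search and flattens the results into a map from file path to match offset ranges.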
6867 async fn search(
6868 project: &ModelHandle<Project>,
6869 query: SearchQuery,
6870 cx: &mut gpui::TestAppContext,
6871 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6872 let results = project
6873 .update(cx, |project, cx| project.search(query, cx))
6874 .await?;
6875
6876 Ok(results
6877 .into_iter()
6878 .map(|(buffer, ranges)| {
6879 buffer.read_with(cx, |buffer, _| {
6880 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6881 let ranges = ranges
6882 .into_iter()
6883 .map(|range| range.to_offset(buffer))
6884 .collect::<Vec<_>>();
6885 (path, ranges)
6886 })
6887 })
6888 .collect())
6889 }
6890 }
6891}