1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToLspPosition,
22 ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128 CollaboratorLeft(PeerId),
129}
130
131enum LanguageServerEvent {
132 WorkStart {
133 token: String,
134 },
135 WorkProgress {
136 token: String,
137 progress: LanguageServerProgress,
138 },
139 WorkEnd {
140 token: String,
141 },
142 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
143}
144
145pub struct LanguageServerStatus {
146 pub name: String,
147 pub pending_work: BTreeMap<String, LanguageServerProgress>,
148 pending_diagnostic_updates: isize,
149}
150
151#[derive(Clone, Debug)]
152pub struct LanguageServerProgress {
153 pub message: Option<String>,
154 pub percentage: Option<usize>,
155 pub last_update_at: Instant,
156}
157
158#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
159pub struct ProjectPath {
160 pub worktree_id: WorktreeId,
161 pub path: Arc<Path>,
162}
163
164#[derive(Clone, Debug, Default, PartialEq)]
165pub struct DiagnosticSummary {
166 pub error_count: usize,
167 pub warning_count: usize,
168 pub info_count: usize,
169 pub hint_count: usize,
170}
171
172#[derive(Debug)]
173pub struct Location {
174 pub buffer: ModelHandle<Buffer>,
175 pub range: Range<language::Anchor>,
176}
177
178#[derive(Debug)]
179pub struct DocumentHighlight {
180 pub range: Range<language::Anchor>,
181 pub kind: DocumentHighlightKind,
182}
183
184#[derive(Clone, Debug)]
185pub struct Symbol {
186 pub source_worktree_id: WorktreeId,
187 pub worktree_id: WorktreeId,
188 pub language_name: String,
189 pub path: PathBuf,
190 pub label: CodeLabel,
191 pub name: String,
192 pub kind: lsp::SymbolKind,
193 pub range: Range<PointUtf16>,
194 pub signature: [u8; 32],
195}
196
197#[derive(Default)]
198pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
235#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
236pub struct ProjectEntryId(usize);
237
238impl ProjectEntryId {
239 pub fn new(counter: &AtomicUsize) -> Self {
240 Self(counter.fetch_add(1, SeqCst))
241 }
242
243 pub fn from_proto(id: u64) -> Self {
244 Self(id as usize)
245 }
246
247 pub fn to_proto(&self) -> u64 {
248 self.0 as u64
249 }
250
251 pub fn to_usize(&self) -> usize {
252 self.0
253 }
254}
255
256impl Project {
257 pub fn init(client: &Arc<Client>) {
258 client.add_model_message_handler(Self::handle_add_collaborator);
259 client.add_model_message_handler(Self::handle_buffer_reloaded);
260 client.add_model_message_handler(Self::handle_buffer_saved);
261 client.add_model_message_handler(Self::handle_start_language_server);
262 client.add_model_message_handler(Self::handle_update_language_server);
263 client.add_model_message_handler(Self::handle_remove_collaborator);
264 client.add_model_message_handler(Self::handle_register_worktree);
265 client.add_model_message_handler(Self::handle_unregister_worktree);
266 client.add_model_message_handler(Self::handle_unshare_project);
267 client.add_model_message_handler(Self::handle_update_buffer_file);
268 client.add_model_message_handler(Self::handle_update_buffer);
269 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
270 client.add_model_message_handler(Self::handle_update_worktree);
271 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
272 client.add_model_request_handler(Self::handle_apply_code_action);
273 client.add_model_request_handler(Self::handle_format_buffers);
274 client.add_model_request_handler(Self::handle_get_code_actions);
275 client.add_model_request_handler(Self::handle_get_completions);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
278 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
280 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
281 client.add_model_request_handler(Self::handle_search_project);
282 client.add_model_request_handler(Self::handle_get_project_symbols);
283 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
284 client.add_model_request_handler(Self::handle_open_buffer_by_id);
285 client.add_model_request_handler(Self::handle_open_buffer_by_path);
286 client.add_model_request_handler(Self::handle_save_buffer);
287 }
288
289 pub fn local(
290 client: Arc<Client>,
291 user_store: ModelHandle<UserStore>,
292 languages: Arc<LanguageRegistry>,
293 fs: Arc<dyn Fs>,
294 cx: &mut MutableAppContext,
295 ) -> ModelHandle<Self> {
296 cx.add_model(|cx: &mut ModelContext<Self>| {
297 let (remote_id_tx, remote_id_rx) = watch::channel();
298 let _maintain_remote_id_task = cx.spawn_weak({
299 let rpc = client.clone();
300 move |this, mut cx| {
301 async move {
302 let mut status = rpc.status();
303 while let Some(status) = status.next().await {
304 if let Some(this) = this.upgrade(&cx) {
305 if status.is_connected() {
306 this.update(&mut cx, |this, cx| this.register(cx)).await?;
307 } else {
308 this.update(&mut cx, |this, cx| this.unregister(cx));
309 }
310 }
311 }
312 Ok(())
313 }
314 .log_err()
315 }
316 });
317
318 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
319 Self {
320 worktrees: Default::default(),
321 collaborators: Default::default(),
322 opened_buffers: Default::default(),
323 shared_buffers: Default::default(),
324 loading_buffers: Default::default(),
325 loading_local_worktrees: Default::default(),
326 buffer_snapshots: Default::default(),
327 client_state: ProjectClientState::Local {
328 is_shared: false,
329 remote_id_tx,
330 remote_id_rx,
331 _maintain_remote_id_task,
332 },
333 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
334 subscriptions: Vec::new(),
335 active_entry: None,
336 languages,
337 client,
338 user_store,
339 fs,
340 next_entry_id: Default::default(),
341 language_servers_with_diagnostics_running: 0,
342 language_servers: Default::default(),
343 started_language_servers: Default::default(),
344 language_server_statuses: Default::default(),
345 language_server_settings: Default::default(),
346 next_language_server_id: 0,
347 nonce: StdRng::from_entropy().gen(),
348 }
349 })
350 }
351
352 pub async fn remote(
353 remote_id: u64,
354 client: Arc<Client>,
355 user_store: ModelHandle<UserStore>,
356 languages: Arc<LanguageRegistry>,
357 fs: Arc<dyn Fs>,
358 cx: &mut AsyncAppContext,
359 ) -> Result<ModelHandle<Self>> {
360 client.authenticate_and_connect(true, &cx).await?;
361
362 let response = client
363 .request(proto::JoinProject {
364 project_id: remote_id,
365 })
366 .await?;
367
368 let replica_id = response.replica_id as ReplicaId;
369
370 let mut worktrees = Vec::new();
371 for worktree in response.worktrees {
372 let (worktree, load_task) = cx
373 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
374 worktrees.push(worktree);
375 load_task.detach();
376 }
377
378 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
379 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
380 let mut this = Self {
381 worktrees: Vec::new(),
382 loading_buffers: Default::default(),
383 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
384 shared_buffers: Default::default(),
385 loading_local_worktrees: Default::default(),
386 active_entry: None,
387 collaborators: Default::default(),
388 languages,
389 user_store: user_store.clone(),
390 fs,
391 next_entry_id: Default::default(),
392 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
393 client: client.clone(),
394 client_state: ProjectClientState::Remote {
395 sharing_has_stopped: false,
396 remote_id,
397 replica_id,
398 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
399 async move {
400 let mut status = client.status();
401 let is_connected =
402 status.next().await.map_or(false, |s| s.is_connected());
403 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
404 if !is_connected || status.next().await.is_some() {
405 if let Some(this) = this.upgrade(&cx) {
406 this.update(&mut cx, |this, cx| this.project_unshared(cx))
407 }
408 }
409 Ok(())
410 }
411 .log_err()
412 }),
413 },
414 language_servers_with_diagnostics_running: 0,
415 language_servers: Default::default(),
416 started_language_servers: Default::default(),
417 language_server_settings: Default::default(),
418 language_server_statuses: response
419 .language_servers
420 .into_iter()
421 .map(|server| {
422 (
423 server.id as usize,
424 LanguageServerStatus {
425 name: server.name,
426 pending_work: Default::default(),
427 pending_diagnostic_updates: 0,
428 },
429 )
430 })
431 .collect(),
432 next_language_server_id: 0,
433 opened_buffers: Default::default(),
434 buffer_snapshots: Default::default(),
435 nonce: StdRng::from_entropy().gen(),
436 };
437 for worktree in worktrees {
438 this.add_worktree(&worktree, cx);
439 }
440 this
441 });
442
443 let user_ids = response
444 .collaborators
445 .iter()
446 .map(|peer| peer.user_id)
447 .collect();
448 user_store
449 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
450 .await?;
451 let mut collaborators = HashMap::default();
452 for message in response.collaborators {
453 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
454 collaborators.insert(collaborator.peer_id, collaborator);
455 }
456
457 this.update(cx, |this, _| {
458 this.collaborators = collaborators;
459 });
460
461 Ok(this)
462 }
463
464 #[cfg(any(test, feature = "test-support"))]
465 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
466 let languages = Arc::new(LanguageRegistry::test());
467 let http_client = client::test::FakeHttpClient::with_404_response();
468 let client = client::Client::new(http_client.clone());
469 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
470 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
471 }
472
473 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
474 self.opened_buffers
475 .get(&remote_id)
476 .and_then(|buffer| buffer.upgrade(cx))
477 }
478
479 #[cfg(any(test, feature = "test-support"))]
480 pub fn languages(&self) -> &Arc<LanguageRegistry> {
481 &self.languages
482 }
483
484 #[cfg(any(test, feature = "test-support"))]
485 pub fn check_invariants(&self, cx: &AppContext) {
486 if self.is_local() {
487 let mut worktree_root_paths = HashMap::default();
488 for worktree in self.worktrees(cx) {
489 let worktree = worktree.read(cx);
490 let abs_path = worktree.as_local().unwrap().abs_path().clone();
491 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
492 assert_eq!(
493 prev_worktree_id,
494 None,
495 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
496 abs_path,
497 worktree.id(),
498 prev_worktree_id
499 )
500 }
501 } else {
502 let replica_id = self.replica_id();
503 for buffer in self.opened_buffers.values() {
504 if let Some(buffer) = buffer.upgrade(cx) {
505 let buffer = buffer.read(cx);
506 assert_eq!(
507 buffer.deferred_ops_len(),
508 0,
509 "replica {}, buffer {} has deferred operations",
510 replica_id,
511 buffer.remote_id()
512 );
513 }
514 }
515 }
516 }
517
518 #[cfg(any(test, feature = "test-support"))]
519 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
520 let path = path.into();
521 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
522 self.opened_buffers.iter().any(|(_, buffer)| {
523 if let Some(buffer) = buffer.upgrade(cx) {
524 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
525 if file.worktree == worktree && file.path() == &path.path {
526 return true;
527 }
528 }
529 }
530 false
531 })
532 } else {
533 false
534 }
535 }
536
537 pub fn fs(&self) -> &Arc<dyn Fs> {
538 &self.fs
539 }
540
541 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
542 self.unshare(cx);
543 for worktree in &self.worktrees {
544 if let Some(worktree) = worktree.upgrade(cx) {
545 worktree.update(cx, |worktree, _| {
546 worktree.as_local_mut().unwrap().unregister();
547 });
548 }
549 }
550
551 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
552 *remote_id_tx.borrow_mut() = None;
553 }
554
555 self.subscriptions.clear();
556 }
557
558 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
559 self.unregister(cx);
560
561 let response = self.client.request(proto::RegisterProject {});
562 cx.spawn(|this, mut cx| async move {
563 let remote_id = response.await?.project_id;
564
565 let mut registrations = Vec::new();
566 this.update(&mut cx, |this, cx| {
567 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
568 *remote_id_tx.borrow_mut() = Some(remote_id);
569 }
570
571 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
572
573 this.subscriptions
574 .push(this.client.add_model_for_remote_entity(remote_id, cx));
575
576 for worktree in &this.worktrees {
577 if let Some(worktree) = worktree.upgrade(cx) {
578 registrations.push(worktree.update(cx, |worktree, cx| {
579 let worktree = worktree.as_local_mut().unwrap();
580 worktree.register(remote_id, cx)
581 }));
582 }
583 }
584 });
585
586 futures::future::try_join_all(registrations).await?;
587 Ok(())
588 })
589 }
590
591 pub fn remote_id(&self) -> Option<u64> {
592 match &self.client_state {
593 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
594 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
595 }
596 }
597
598 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
599 let mut id = None;
600 let mut watch = None;
601 match &self.client_state {
602 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
603 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
604 }
605
606 async move {
607 if let Some(id) = id {
608 return id;
609 }
610 let mut watch = watch.unwrap();
611 loop {
612 let id = *watch.borrow();
613 if let Some(id) = id {
614 return id;
615 }
616 watch.next().await;
617 }
618 }
619 }
620
621 pub fn replica_id(&self) -> ReplicaId {
622 match &self.client_state {
623 ProjectClientState::Local { .. } => 0,
624 ProjectClientState::Remote { replica_id, .. } => *replica_id,
625 }
626 }
627
628 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
629 &self.collaborators
630 }
631
632 pub fn worktrees<'a>(
633 &'a self,
634 cx: &'a AppContext,
635 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
636 self.worktrees
637 .iter()
638 .filter_map(move |worktree| worktree.upgrade(cx))
639 }
640
641 pub fn visible_worktrees<'a>(
642 &'a self,
643 cx: &'a AppContext,
644 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
645 self.worktrees.iter().filter_map(|worktree| {
646 worktree.upgrade(cx).and_then(|worktree| {
647 if worktree.read(cx).is_visible() {
648 Some(worktree)
649 } else {
650 None
651 }
652 })
653 })
654 }
655
656 pub fn worktree_for_id(
657 &self,
658 id: WorktreeId,
659 cx: &AppContext,
660 ) -> Option<ModelHandle<Worktree>> {
661 self.worktrees(cx)
662 .find(|worktree| worktree.read(cx).id() == id)
663 }
664
665 pub fn worktree_for_entry(
666 &self,
667 entry_id: ProjectEntryId,
668 cx: &AppContext,
669 ) -> Option<ModelHandle<Worktree>> {
670 self.worktrees(cx)
671 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
672 }
673
674 pub fn worktree_id_for_entry(
675 &self,
676 entry_id: ProjectEntryId,
677 cx: &AppContext,
678 ) -> Option<WorktreeId> {
679 self.worktree_for_entry(entry_id, cx)
680 .map(|worktree| worktree.read(cx).id())
681 }
682
683 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
684 let rpc = self.client.clone();
685 cx.spawn(|this, mut cx| async move {
686 let project_id = this.update(&mut cx, |this, cx| {
687 if let ProjectClientState::Local {
688 is_shared,
689 remote_id_rx,
690 ..
691 } = &mut this.client_state
692 {
693 *is_shared = true;
694
695 for open_buffer in this.opened_buffers.values_mut() {
696 match open_buffer {
697 OpenBuffer::Strong(_) => {}
698 OpenBuffer::Weak(buffer) => {
699 if let Some(buffer) = buffer.upgrade(cx) {
700 *open_buffer = OpenBuffer::Strong(buffer);
701 }
702 }
703 OpenBuffer::Loading(_) => unreachable!(),
704 }
705 }
706
707 for worktree_handle in this.worktrees.iter_mut() {
708 match worktree_handle {
709 WorktreeHandle::Strong(_) => {}
710 WorktreeHandle::Weak(worktree) => {
711 if let Some(worktree) = worktree.upgrade(cx) {
712 *worktree_handle = WorktreeHandle::Strong(worktree);
713 }
714 }
715 }
716 }
717
718 remote_id_rx
719 .borrow()
720 .ok_or_else(|| anyhow!("no project id"))
721 } else {
722 Err(anyhow!("can't share a remote project"))
723 }
724 })?;
725
726 rpc.request(proto::ShareProject { project_id }).await?;
727
728 let mut tasks = Vec::new();
729 this.update(&mut cx, |this, cx| {
730 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
731 worktree.update(cx, |worktree, cx| {
732 let worktree = worktree.as_local_mut().unwrap();
733 tasks.push(worktree.share(project_id, cx));
734 });
735 }
736 });
737 for task in tasks {
738 task.await?;
739 }
740 this.update(&mut cx, |_, cx| cx.notify());
741 Ok(())
742 })
743 }
744
745 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
746 let rpc = self.client.clone();
747
748 if let ProjectClientState::Local {
749 is_shared,
750 remote_id_rx,
751 ..
752 } = &mut self.client_state
753 {
754 if !*is_shared {
755 return;
756 }
757
758 *is_shared = false;
759 self.collaborators.clear();
760 self.shared_buffers.clear();
761 for worktree_handle in self.worktrees.iter_mut() {
762 if let WorktreeHandle::Strong(worktree) = worktree_handle {
763 let is_visible = worktree.update(cx, |worktree, _| {
764 worktree.as_local_mut().unwrap().unshare();
765 worktree.is_visible()
766 });
767 if !is_visible {
768 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
769 }
770 }
771 }
772
773 for open_buffer in self.opened_buffers.values_mut() {
774 match open_buffer {
775 OpenBuffer::Strong(buffer) => {
776 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
777 }
778 _ => {}
779 }
780 }
781
782 if let Some(project_id) = *remote_id_rx.borrow() {
783 rpc.send(proto::UnshareProject { project_id }).log_err();
784 }
785
786 cx.notify();
787 } else {
788 log::error!("attempted to unshare a remote project");
789 }
790 }
791
792 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
793 if let ProjectClientState::Remote {
794 sharing_has_stopped,
795 ..
796 } = &mut self.client_state
797 {
798 *sharing_has_stopped = true;
799 self.collaborators.clear();
800 cx.notify();
801 }
802 }
803
804 pub fn is_read_only(&self) -> bool {
805 match &self.client_state {
806 ProjectClientState::Local { .. } => false,
807 ProjectClientState::Remote {
808 sharing_has_stopped,
809 ..
810 } => *sharing_has_stopped,
811 }
812 }
813
814 pub fn is_local(&self) -> bool {
815 match &self.client_state {
816 ProjectClientState::Local { .. } => true,
817 ProjectClientState::Remote { .. } => false,
818 }
819 }
820
821 pub fn is_remote(&self) -> bool {
822 !self.is_local()
823 }
824
825 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
826 if self.is_remote() {
827 return Err(anyhow!("creating buffers as a guest is not supported yet"));
828 }
829
830 let buffer = cx.add_model(|cx| {
831 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
832 });
833 self.register_buffer(&buffer, cx)?;
834 Ok(buffer)
835 }
836
837 pub fn open_path(
838 &mut self,
839 path: impl Into<ProjectPath>,
840 cx: &mut ModelContext<Self>,
841 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
842 let task = self.open_buffer(path, cx);
843 cx.spawn_weak(|_, cx| async move {
844 let buffer = task.await?;
845 let project_entry_id = buffer
846 .read_with(&cx, |buffer, cx| {
847 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
848 })
849 .ok_or_else(|| anyhow!("no project entry"))?;
850 Ok((project_entry_id, buffer.into()))
851 })
852 }
853
854 pub fn open_buffer(
855 &mut self,
856 path: impl Into<ProjectPath>,
857 cx: &mut ModelContext<Self>,
858 ) -> Task<Result<ModelHandle<Buffer>>> {
859 let project_path = path.into();
860 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
861 worktree
862 } else {
863 return Task::ready(Err(anyhow!("no such worktree")));
864 };
865
866 // If there is already a buffer for the given path, then return it.
867 let existing_buffer = self.get_open_buffer(&project_path, cx);
868 if let Some(existing_buffer) = existing_buffer {
869 return Task::ready(Ok(existing_buffer));
870 }
871
872 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
873 // If the given path is already being loaded, then wait for that existing
874 // task to complete and return the same buffer.
875 hash_map::Entry::Occupied(e) => e.get().clone(),
876
877 // Otherwise, record the fact that this path is now being loaded.
878 hash_map::Entry::Vacant(entry) => {
879 let (mut tx, rx) = postage::watch::channel();
880 entry.insert(rx.clone());
881
882 let load_buffer = if worktree.read(cx).is_local() {
883 self.open_local_buffer(&project_path.path, &worktree, cx)
884 } else {
885 self.open_remote_buffer(&project_path.path, &worktree, cx)
886 };
887
888 cx.spawn(move |this, mut cx| async move {
889 let load_result = load_buffer.await;
890 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
891 // Record the fact that the buffer is no longer loading.
892 this.loading_buffers.remove(&project_path);
893 let buffer = load_result.map_err(Arc::new)?;
894 Ok(buffer)
895 }));
896 })
897 .detach();
898 rx
899 }
900 };
901
902 cx.foreground().spawn(async move {
903 loop {
904 if let Some(result) = loading_watch.borrow().as_ref() {
905 match result {
906 Ok(buffer) => return Ok(buffer.clone()),
907 Err(error) => return Err(anyhow!("{}", error)),
908 }
909 }
910 loading_watch.next().await;
911 }
912 })
913 }
914
915 fn open_local_buffer(
916 &mut self,
917 path: &Arc<Path>,
918 worktree: &ModelHandle<Worktree>,
919 cx: &mut ModelContext<Self>,
920 ) -> Task<Result<ModelHandle<Buffer>>> {
921 let load_buffer = worktree.update(cx, |worktree, cx| {
922 let worktree = worktree.as_local_mut().unwrap();
923 worktree.load_buffer(path, cx)
924 });
925 cx.spawn(|this, mut cx| async move {
926 let buffer = load_buffer.await?;
927 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
928 Ok(buffer)
929 })
930 }
931
932 fn open_remote_buffer(
933 &mut self,
934 path: &Arc<Path>,
935 worktree: &ModelHandle<Worktree>,
936 cx: &mut ModelContext<Self>,
937 ) -> Task<Result<ModelHandle<Buffer>>> {
938 let rpc = self.client.clone();
939 let project_id = self.remote_id().unwrap();
940 let remote_worktree_id = worktree.read(cx).id();
941 let path = path.clone();
942 let path_string = path.to_string_lossy().to_string();
943 cx.spawn(|this, mut cx| async move {
944 let response = rpc
945 .request(proto::OpenBufferByPath {
946 project_id,
947 worktree_id: remote_worktree_id.to_proto(),
948 path: path_string,
949 })
950 .await?;
951 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
952 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
953 .await
954 })
955 }
956
957 fn open_local_buffer_via_lsp(
958 &mut self,
959 abs_path: lsp::Url,
960 lang_name: Arc<str>,
961 lang_server: Arc<LanguageServer>,
962 cx: &mut ModelContext<Self>,
963 ) -> Task<Result<ModelHandle<Buffer>>> {
964 cx.spawn(|this, mut cx| async move {
965 let abs_path = abs_path
966 .to_file_path()
967 .map_err(|_| anyhow!("can't convert URI to path"))?;
968 let (worktree, relative_path) = if let Some(result) =
969 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
970 {
971 result
972 } else {
973 let worktree = this
974 .update(&mut cx, |this, cx| {
975 this.create_local_worktree(&abs_path, false, cx)
976 })
977 .await?;
978 this.update(&mut cx, |this, cx| {
979 this.language_servers
980 .insert((worktree.read(cx).id(), lang_name), lang_server);
981 });
982 (worktree, PathBuf::new())
983 };
984
985 let project_path = ProjectPath {
986 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
987 path: relative_path.into(),
988 };
989 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
990 .await
991 })
992 }
993
994 pub fn open_buffer_by_id(
995 &mut self,
996 id: u64,
997 cx: &mut ModelContext<Self>,
998 ) -> Task<Result<ModelHandle<Buffer>>> {
999 if let Some(buffer) = self.buffer_for_id(id, cx) {
1000 Task::ready(Ok(buffer))
1001 } else if self.is_local() {
1002 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1003 } else if let Some(project_id) = self.remote_id() {
1004 let request = self
1005 .client
1006 .request(proto::OpenBufferById { project_id, id });
1007 cx.spawn(|this, mut cx| async move {
1008 let buffer = request
1009 .await?
1010 .buffer
1011 .ok_or_else(|| anyhow!("invalid buffer"))?;
1012 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1013 .await
1014 })
1015 } else {
1016 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1017 }
1018 }
1019
1020 pub fn save_buffer_as(
1021 &mut self,
1022 buffer: ModelHandle<Buffer>,
1023 abs_path: PathBuf,
1024 cx: &mut ModelContext<Project>,
1025 ) -> Task<Result<()>> {
1026 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1027 cx.spawn(|this, mut cx| async move {
1028 let (worktree, path) = worktree_task.await?;
1029 worktree
1030 .update(&mut cx, |worktree, cx| {
1031 worktree
1032 .as_local_mut()
1033 .unwrap()
1034 .save_buffer_as(buffer.clone(), path, cx)
1035 })
1036 .await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.assign_language_to_buffer(&buffer, cx);
1039 this.register_buffer_with_language_server(&buffer, cx);
1040 });
1041 Ok(())
1042 })
1043 }
1044
1045 pub fn get_open_buffer(
1046 &mut self,
1047 path: &ProjectPath,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Option<ModelHandle<Buffer>> {
1050 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1051 self.opened_buffers.values().find_map(|buffer| {
1052 let buffer = buffer.upgrade(cx)?;
1053 let file = File::from_dyn(buffer.read(cx).file())?;
1054 if file.worktree == worktree && file.path() == &path.path {
1055 Some(buffer)
1056 } else {
1057 None
1058 }
1059 })
1060 }
1061
1062 fn register_buffer(
1063 &mut self,
1064 buffer: &ModelHandle<Buffer>,
1065 cx: &mut ModelContext<Self>,
1066 ) -> Result<()> {
1067 let remote_id = buffer.read(cx).remote_id();
1068 let open_buffer = if self.is_remote() || self.is_shared() {
1069 OpenBuffer::Strong(buffer.clone())
1070 } else {
1071 OpenBuffer::Weak(buffer.downgrade())
1072 };
1073
1074 match self.opened_buffers.insert(remote_id, open_buffer) {
1075 None => {}
1076 Some(OpenBuffer::Loading(operations)) => {
1077 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1078 }
1079 Some(OpenBuffer::Weak(existing_handle)) => {
1080 if existing_handle.upgrade(cx).is_some() {
1081 Err(anyhow!(
1082 "already registered buffer with remote id {}",
1083 remote_id
1084 ))?
1085 }
1086 }
1087 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?,
1091 }
1092 cx.subscribe(buffer, |this, buffer, event, cx| {
1093 this.on_buffer_event(buffer, event, cx);
1094 })
1095 .detach();
1096
1097 self.assign_language_to_buffer(buffer, cx);
1098 self.register_buffer_with_language_server(buffer, cx);
1099
1100 Ok(())
1101 }
1102
1103 fn register_buffer_with_language_server(
1104 &mut self,
1105 buffer_handle: &ModelHandle<Buffer>,
1106 cx: &mut ModelContext<Self>,
1107 ) {
1108 let buffer = buffer_handle.read(cx);
1109 let buffer_id = buffer.remote_id();
1110 if let Some(file) = File::from_dyn(buffer.file()) {
1111 if file.is_local() {
1112 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1113 let initial_snapshot = buffer.text_snapshot();
1114 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1115
1116 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1117 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1118 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1119 .log_err();
1120 }
1121 }
1122
1123 if let Some(server) = language_server {
1124 server
1125 .notify::<lsp::notification::DidOpenTextDocument>(
1126 lsp::DidOpenTextDocumentParams {
1127 text_document: lsp::TextDocumentItem::new(
1128 uri,
1129 Default::default(),
1130 0,
1131 initial_snapshot.text(),
1132 ),
1133 }
1134 .clone(),
1135 )
1136 .log_err();
1137 buffer_handle.update(cx, |buffer, cx| {
1138 buffer.set_completion_triggers(
1139 server
1140 .capabilities()
1141 .completion_provider
1142 .as_ref()
1143 .and_then(|provider| provider.trigger_characters.clone())
1144 .unwrap_or(Vec::new()),
1145 cx,
1146 )
1147 });
1148 self.buffer_snapshots
1149 .insert(buffer_id, vec![(0, initial_snapshot)]);
1150 }
1151
1152 cx.observe_release(buffer_handle, |this, buffer, cx| {
1153 if let Some(file) = File::from_dyn(buffer.file()) {
1154 if file.is_local() {
1155 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1156 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1157 server
1158 .notify::<lsp::notification::DidCloseTextDocument>(
1159 lsp::DidCloseTextDocumentParams {
1160 text_document: lsp::TextDocumentIdentifier::new(
1161 uri.clone(),
1162 ),
1163 },
1164 )
1165 .log_err();
1166 }
1167 }
1168 }
1169 })
1170 .detach();
1171 }
1172 }
1173 }
1174
1175 fn on_buffer_event(
1176 &mut self,
1177 buffer: ModelHandle<Buffer>,
1178 event: &BufferEvent,
1179 cx: &mut ModelContext<Self>,
1180 ) -> Option<()> {
1181 match event {
1182 BufferEvent::Operation(operation) => {
1183 let project_id = self.remote_id()?;
1184 let request = self.client.request(proto::UpdateBuffer {
1185 project_id,
1186 buffer_id: buffer.read(cx).remote_id(),
1187 operations: vec![language::proto::serialize_operation(&operation)],
1188 });
1189 cx.background().spawn(request).detach_and_log_err(cx);
1190 }
1191 BufferEvent::Edited { .. } => {
1192 let language_server = self
1193 .language_server_for_buffer(buffer.read(cx), cx)?
1194 .clone();
1195 let buffer = buffer.read(cx);
1196 let file = File::from_dyn(buffer.file())?;
1197 let abs_path = file.as_local()?.abs_path(cx);
1198 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1199 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1200 let (version, prev_snapshot) = buffer_snapshots.last()?;
1201 let next_snapshot = buffer.text_snapshot();
1202 let next_version = version + 1;
1203
1204 let content_changes = buffer
1205 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1206 .map(|edit| {
1207 let edit_start = edit.new.start.0;
1208 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1209 let new_text = next_snapshot
1210 .text_for_range(edit.new.start.1..edit.new.end.1)
1211 .collect();
1212 lsp::TextDocumentContentChangeEvent {
1213 range: Some(lsp::Range::new(
1214 edit_start.to_lsp_position(),
1215 edit_end.to_lsp_position(),
1216 )),
1217 range_length: None,
1218 text: new_text,
1219 }
1220 })
1221 .collect();
1222
1223 buffer_snapshots.push((next_version, next_snapshot));
1224
1225 language_server
1226 .notify::<lsp::notification::DidChangeTextDocument>(
1227 lsp::DidChangeTextDocumentParams {
1228 text_document: lsp::VersionedTextDocumentIdentifier::new(
1229 uri,
1230 next_version,
1231 ),
1232 content_changes,
1233 },
1234 )
1235 .log_err();
1236 }
1237 BufferEvent::Saved => {
1238 let file = File::from_dyn(buffer.read(cx).file())?;
1239 let worktree_id = file.worktree_id(cx);
1240 let abs_path = file.as_local()?.abs_path(cx);
1241 let text_document = lsp::TextDocumentIdentifier {
1242 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1243 };
1244
1245 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1246 server
1247 .notify::<lsp::notification::DidSaveTextDocument>(
1248 lsp::DidSaveTextDocumentParams {
1249 text_document: text_document.clone(),
1250 text: None,
1251 },
1252 )
1253 .log_err();
1254 }
1255 }
1256 _ => {}
1257 }
1258
1259 None
1260 }
1261
1262 fn language_servers_for_worktree(
1263 &self,
1264 worktree_id: WorktreeId,
1265 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1266 self.language_servers.iter().filter_map(
1267 move |((language_server_worktree_id, language_name), server)| {
1268 if *language_server_worktree_id == worktree_id {
1269 Some((language_name.as_ref(), server))
1270 } else {
1271 None
1272 }
1273 },
1274 )
1275 }
1276
1277 fn assign_language_to_buffer(
1278 &mut self,
1279 buffer: &ModelHandle<Buffer>,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Option<()> {
1282 // If the buffer has a language, set it and start the language server if we haven't already.
1283 let full_path = buffer.read(cx).file()?.full_path(cx);
1284 let language = self.languages.select_language(&full_path)?;
1285 buffer.update(cx, |buffer, cx| {
1286 buffer.set_language(Some(language.clone()), cx);
1287 });
1288
1289 let file = File::from_dyn(buffer.read(cx).file())?;
1290 let worktree = file.worktree.read(cx).as_local()?;
1291 let worktree_id = worktree.id();
1292 let worktree_abs_path = worktree.abs_path().clone();
1293 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1294
1295 None
1296 }
1297
1298 fn start_language_server(
1299 &mut self,
1300 worktree_id: WorktreeId,
1301 worktree_path: Arc<Path>,
1302 language: Arc<Language>,
1303 cx: &mut ModelContext<Self>,
1304 ) {
1305 let key = (worktree_id, language.name());
1306 self.started_language_servers
1307 .entry(key.clone())
1308 .or_insert_with(|| {
1309 let server_id = post_inc(&mut self.next_language_server_id);
1310 let language_server = self.languages.start_language_server(
1311 language.clone(),
1312 worktree_path,
1313 self.client.http_client(),
1314 cx,
1315 );
1316 cx.spawn_weak(|this, mut cx| async move {
1317 let mut language_server = language_server?.await.log_err()?;
1318 let this = this.upgrade(&cx)?;
1319 let (language_server_events_tx, language_server_events_rx) =
1320 smol::channel::unbounded();
1321
1322 language_server
1323 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1324 let language_server_events_tx = language_server_events_tx.clone();
1325 move |params| {
1326 language_server_events_tx
1327 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1328 .ok();
1329 }
1330 })
1331 .detach();
1332
1333 language_server
1334 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1335 let settings = this
1336 .read_with(&cx, |this, _| this.language_server_settings.clone());
1337 move |params| {
1338 let settings = settings.lock();
1339 Ok(params
1340 .items
1341 .into_iter()
1342 .map(|item| {
1343 if let Some(section) = &item.section {
1344 settings
1345 .get(section)
1346 .cloned()
1347 .unwrap_or(serde_json::Value::Null)
1348 } else {
1349 settings.clone()
1350 }
1351 })
1352 .collect())
1353 }
1354 })
1355 .detach();
1356
1357 language_server
1358 .on_notification::<lsp::notification::Progress, _>(move |params| {
1359 let token = match params.token {
1360 lsp::NumberOrString::String(token) => token,
1361 lsp::NumberOrString::Number(token) => {
1362 log::info!("skipping numeric progress token {}", token);
1363 return;
1364 }
1365 };
1366
1367 match params.value {
1368 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1369 lsp::WorkDoneProgress::Begin(_) => {
1370 language_server_events_tx
1371 .try_send(LanguageServerEvent::WorkStart { token })
1372 .ok();
1373 }
1374 lsp::WorkDoneProgress::Report(report) => {
1375 language_server_events_tx
1376 .try_send(LanguageServerEvent::WorkProgress {
1377 token,
1378 progress: LanguageServerProgress {
1379 message: report.message,
1380 percentage: report
1381 .percentage
1382 .map(|p| p as usize),
1383 last_update_at: Instant::now(),
1384 },
1385 })
1386 .ok();
1387 }
1388 lsp::WorkDoneProgress::End(_) => {
1389 language_server_events_tx
1390 .try_send(LanguageServerEvent::WorkEnd { token })
1391 .ok();
1392 }
1393 },
1394 }
1395 })
1396 .detach();
1397
1398 // Process all the LSP events.
1399 cx.spawn(|mut cx| {
1400 let this = this.downgrade();
1401 async move {
1402 while let Ok(event) = language_server_events_rx.recv().await {
1403 let this = this.upgrade(&cx)?;
1404 this.update(&mut cx, |this, cx| {
1405 this.on_lsp_event(server_id, event, &language, cx)
1406 });
1407
1408 // Don't starve the main thread when lots of events arrive all at once.
1409 smol::future::yield_now().await;
1410 }
1411 Some(())
1412 }
1413 })
1414 .detach();
1415
1416 let language_server = language_server.initialize().await.log_err()?;
1417 this.update(&mut cx, |this, cx| {
1418 this.language_servers
1419 .insert(key.clone(), language_server.clone());
1420 this.language_server_statuses.insert(
1421 server_id,
1422 LanguageServerStatus {
1423 name: language_server.name().to_string(),
1424 pending_work: Default::default(),
1425 pending_diagnostic_updates: 0,
1426 },
1427 );
1428 language_server
1429 .notify::<lsp::notification::DidChangeConfiguration>(
1430 lsp::DidChangeConfigurationParams {
1431 settings: this.language_server_settings.lock().clone(),
1432 },
1433 )
1434 .ok();
1435
1436 if let Some(project_id) = this.remote_id() {
1437 this.client
1438 .send(proto::StartLanguageServer {
1439 project_id,
1440 server: Some(proto::LanguageServer {
1441 id: server_id as u64,
1442 name: language_server.name().to_string(),
1443 }),
1444 })
1445 .log_err();
1446 }
1447
1448 // Tell the language server about every open buffer in the worktree that matches the language.
1449 for buffer in this.opened_buffers.values() {
1450 if let Some(buffer_handle) = buffer.upgrade(cx) {
1451 let buffer = buffer_handle.read(cx);
1452 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1453 file
1454 } else {
1455 continue;
1456 };
1457 let language = if let Some(language) = buffer.language() {
1458 language
1459 } else {
1460 continue;
1461 };
1462 if (file.worktree.read(cx).id(), language.name()) != key {
1463 continue;
1464 }
1465
1466 let file = file.as_local()?;
1467 let versions = this
1468 .buffer_snapshots
1469 .entry(buffer.remote_id())
1470 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1471 let (version, initial_snapshot) = versions.last().unwrap();
1472 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1473 language_server
1474 .notify::<lsp::notification::DidOpenTextDocument>(
1475 lsp::DidOpenTextDocumentParams {
1476 text_document: lsp::TextDocumentItem::new(
1477 uri,
1478 Default::default(),
1479 *version,
1480 initial_snapshot.text(),
1481 ),
1482 },
1483 )
1484 .log_err()?;
1485 buffer_handle.update(cx, |buffer, cx| {
1486 buffer.set_completion_triggers(
1487 language_server
1488 .capabilities()
1489 .completion_provider
1490 .as_ref()
1491 .and_then(|provider| {
1492 provider.trigger_characters.clone()
1493 })
1494 .unwrap_or(Vec::new()),
1495 cx,
1496 )
1497 });
1498 }
1499 }
1500
1501 cx.notify();
1502 Some(())
1503 });
1504
1505 Some(language_server)
1506 })
1507 });
1508 }
1509
1510 fn on_lsp_event(
1511 &mut self,
1512 language_server_id: usize,
1513 event: LanguageServerEvent,
1514 language: &Arc<Language>,
1515 cx: &mut ModelContext<Self>,
1516 ) {
1517 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1518 let language_server_status =
1519 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1520 status
1521 } else {
1522 return;
1523 };
1524
1525 match event {
1526 LanguageServerEvent::WorkStart { token } => {
1527 if Some(&token) == disk_diagnostics_token {
1528 language_server_status.pending_diagnostic_updates += 1;
1529 if language_server_status.pending_diagnostic_updates == 1 {
1530 self.disk_based_diagnostics_started(cx);
1531 self.broadcast_language_server_update(
1532 language_server_id,
1533 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1534 proto::LspDiskBasedDiagnosticsUpdating {},
1535 ),
1536 );
1537 }
1538 } else {
1539 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1540 self.broadcast_language_server_update(
1541 language_server_id,
1542 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1543 token,
1544 }),
1545 );
1546 }
1547 }
1548 LanguageServerEvent::WorkProgress { token, progress } => {
1549 if Some(&token) != disk_diagnostics_token {
1550 self.on_lsp_work_progress(
1551 language_server_id,
1552 token.clone(),
1553 progress.clone(),
1554 cx,
1555 );
1556 self.broadcast_language_server_update(
1557 language_server_id,
1558 proto::update_language_server::Variant::WorkProgress(
1559 proto::LspWorkProgress {
1560 token,
1561 message: progress.message,
1562 percentage: progress.percentage.map(|p| p as u32),
1563 },
1564 ),
1565 );
1566 }
1567 }
1568 LanguageServerEvent::WorkEnd { token } => {
1569 if Some(&token) == disk_diagnostics_token {
1570 language_server_status.pending_diagnostic_updates -= 1;
1571 if language_server_status.pending_diagnostic_updates == 0 {
1572 self.disk_based_diagnostics_finished(cx);
1573 self.broadcast_language_server_update(
1574 language_server_id,
1575 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1576 proto::LspDiskBasedDiagnosticsUpdated {},
1577 ),
1578 );
1579 }
1580 } else {
1581 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1582 self.broadcast_language_server_update(
1583 language_server_id,
1584 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1585 token,
1586 }),
1587 );
1588 }
1589 }
1590 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1591 language.process_diagnostics(&mut params);
1592
1593 if disk_diagnostics_token.is_none() {
1594 self.disk_based_diagnostics_started(cx);
1595 self.broadcast_language_server_update(
1596 language_server_id,
1597 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1598 proto::LspDiskBasedDiagnosticsUpdating {},
1599 ),
1600 );
1601 }
1602 self.update_diagnostics(
1603 params,
1604 language
1605 .disk_based_diagnostic_sources()
1606 .unwrap_or(&Default::default()),
1607 cx,
1608 )
1609 .log_err();
1610 if disk_diagnostics_token.is_none() {
1611 self.disk_based_diagnostics_finished(cx);
1612 self.broadcast_language_server_update(
1613 language_server_id,
1614 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1615 proto::LspDiskBasedDiagnosticsUpdated {},
1616 ),
1617 );
1618 }
1619 }
1620 }
1621 }
1622
1623 fn on_lsp_work_start(
1624 &mut self,
1625 language_server_id: usize,
1626 token: String,
1627 cx: &mut ModelContext<Self>,
1628 ) {
1629 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1630 status.pending_work.insert(
1631 token,
1632 LanguageServerProgress {
1633 message: None,
1634 percentage: None,
1635 last_update_at: Instant::now(),
1636 },
1637 );
1638 cx.notify();
1639 }
1640 }
1641
1642 fn on_lsp_work_progress(
1643 &mut self,
1644 language_server_id: usize,
1645 token: String,
1646 progress: LanguageServerProgress,
1647 cx: &mut ModelContext<Self>,
1648 ) {
1649 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1650 status.pending_work.insert(token, progress);
1651 cx.notify();
1652 }
1653 }
1654
1655 fn on_lsp_work_end(
1656 &mut self,
1657 language_server_id: usize,
1658 token: String,
1659 cx: &mut ModelContext<Self>,
1660 ) {
1661 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1662 status.pending_work.remove(&token);
1663 cx.notify();
1664 }
1665 }
1666
1667 fn broadcast_language_server_update(
1668 &self,
1669 language_server_id: usize,
1670 event: proto::update_language_server::Variant,
1671 ) {
1672 if let Some(project_id) = self.remote_id() {
1673 self.client
1674 .send(proto::UpdateLanguageServer {
1675 project_id,
1676 language_server_id: language_server_id as u64,
1677 variant: Some(event),
1678 })
1679 .log_err();
1680 }
1681 }
1682
1683 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1684 for server in self.language_servers.values() {
1685 server
1686 .notify::<lsp::notification::DidChangeConfiguration>(
1687 lsp::DidChangeConfigurationParams {
1688 settings: settings.clone(),
1689 },
1690 )
1691 .ok();
1692 }
1693 *self.language_server_settings.lock() = settings;
1694 }
1695
1696 pub fn language_server_statuses(
1697 &self,
1698 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1699 self.language_server_statuses.values()
1700 }
1701
1702 pub fn update_diagnostics(
1703 &mut self,
1704 params: lsp::PublishDiagnosticsParams,
1705 disk_based_sources: &HashSet<String>,
1706 cx: &mut ModelContext<Self>,
1707 ) -> Result<()> {
1708 let abs_path = params
1709 .uri
1710 .to_file_path()
1711 .map_err(|_| anyhow!("URI is not a file"))?;
1712 let mut next_group_id = 0;
1713 let mut diagnostics = Vec::default();
1714 let mut primary_diagnostic_group_ids = HashMap::default();
1715 let mut sources_by_group_id = HashMap::default();
1716 let mut supporting_diagnostics = HashMap::default();
1717 for diagnostic in ¶ms.diagnostics {
1718 let source = diagnostic.source.as_ref();
1719 let code = diagnostic.code.as_ref().map(|code| match code {
1720 lsp::NumberOrString::Number(code) => code.to_string(),
1721 lsp::NumberOrString::String(code) => code.clone(),
1722 });
1723 let range = range_from_lsp(diagnostic.range);
1724 let is_supporting = diagnostic
1725 .related_information
1726 .as_ref()
1727 .map_or(false, |infos| {
1728 infos.iter().any(|info| {
1729 primary_diagnostic_group_ids.contains_key(&(
1730 source,
1731 code.clone(),
1732 range_from_lsp(info.location.range),
1733 ))
1734 })
1735 });
1736
1737 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1738 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1739 });
1740
1741 if is_supporting {
1742 supporting_diagnostics.insert(
1743 (source, code.clone(), range),
1744 (diagnostic.severity, is_unnecessary),
1745 );
1746 } else {
1747 let group_id = post_inc(&mut next_group_id);
1748 let is_disk_based =
1749 source.map_or(false, |source| disk_based_sources.contains(source));
1750
1751 sources_by_group_id.insert(group_id, source);
1752 primary_diagnostic_group_ids
1753 .insert((source, code.clone(), range.clone()), group_id);
1754
1755 diagnostics.push(DiagnosticEntry {
1756 range,
1757 diagnostic: Diagnostic {
1758 code: code.clone(),
1759 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1760 message: diagnostic.message.clone(),
1761 group_id,
1762 is_primary: true,
1763 is_valid: true,
1764 is_disk_based,
1765 is_unnecessary,
1766 },
1767 });
1768 if let Some(infos) = &diagnostic.related_information {
1769 for info in infos {
1770 if info.location.uri == params.uri && !info.message.is_empty() {
1771 let range = range_from_lsp(info.location.range);
1772 diagnostics.push(DiagnosticEntry {
1773 range,
1774 diagnostic: Diagnostic {
1775 code: code.clone(),
1776 severity: DiagnosticSeverity::INFORMATION,
1777 message: info.message.clone(),
1778 group_id,
1779 is_primary: false,
1780 is_valid: true,
1781 is_disk_based,
1782 is_unnecessary: false,
1783 },
1784 });
1785 }
1786 }
1787 }
1788 }
1789 }
1790
1791 for entry in &mut diagnostics {
1792 let diagnostic = &mut entry.diagnostic;
1793 if !diagnostic.is_primary {
1794 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1795 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1796 source,
1797 diagnostic.code.clone(),
1798 entry.range.clone(),
1799 )) {
1800 if let Some(severity) = severity {
1801 diagnostic.severity = severity;
1802 }
1803 diagnostic.is_unnecessary = is_unnecessary;
1804 }
1805 }
1806 }
1807
1808 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1809 Ok(())
1810 }
1811
1812 pub fn update_diagnostic_entries(
1813 &mut self,
1814 abs_path: PathBuf,
1815 version: Option<i32>,
1816 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1817 cx: &mut ModelContext<Project>,
1818 ) -> Result<(), anyhow::Error> {
1819 let (worktree, relative_path) = self
1820 .find_local_worktree(&abs_path, cx)
1821 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1822 if !worktree.read(cx).is_visible() {
1823 return Ok(());
1824 }
1825
1826 let project_path = ProjectPath {
1827 worktree_id: worktree.read(cx).id(),
1828 path: relative_path.into(),
1829 };
1830
1831 for buffer in self.opened_buffers.values() {
1832 if let Some(buffer) = buffer.upgrade(cx) {
1833 if buffer
1834 .read(cx)
1835 .file()
1836 .map_or(false, |file| *file.path() == project_path.path)
1837 {
1838 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1839 break;
1840 }
1841 }
1842 }
1843 worktree.update(cx, |worktree, cx| {
1844 worktree
1845 .as_local_mut()
1846 .ok_or_else(|| anyhow!("not a local worktree"))?
1847 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1848 })?;
1849 cx.emit(Event::DiagnosticsUpdated(project_path));
1850 Ok(())
1851 }
1852
1853 fn update_buffer_diagnostics(
1854 &mut self,
1855 buffer: &ModelHandle<Buffer>,
1856 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1857 version: Option<i32>,
1858 cx: &mut ModelContext<Self>,
1859 ) -> Result<()> {
1860 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1861 Ordering::Equal
1862 .then_with(|| b.is_primary.cmp(&a.is_primary))
1863 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1864 .then_with(|| a.severity.cmp(&b.severity))
1865 .then_with(|| a.message.cmp(&b.message))
1866 }
1867
1868 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1869
1870 diagnostics.sort_unstable_by(|a, b| {
1871 Ordering::Equal
1872 .then_with(|| a.range.start.cmp(&b.range.start))
1873 .then_with(|| b.range.end.cmp(&a.range.end))
1874 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1875 });
1876
1877 let mut sanitized_diagnostics = Vec::new();
1878 let edits_since_save = Patch::new(
1879 snapshot
1880 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1881 .collect(),
1882 );
1883 for entry in diagnostics {
1884 let start;
1885 let end;
1886 if entry.diagnostic.is_disk_based {
1887 // Some diagnostics are based on files on disk instead of buffers'
1888 // current contents. Adjust these diagnostics' ranges to reflect
1889 // any unsaved edits.
1890 start = edits_since_save.old_to_new(entry.range.start);
1891 end = edits_since_save.old_to_new(entry.range.end);
1892 } else {
1893 start = entry.range.start;
1894 end = entry.range.end;
1895 }
1896
1897 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1898 ..snapshot.clip_point_utf16(end, Bias::Right);
1899
1900 // Expand empty ranges by one character
1901 if range.start == range.end {
1902 range.end.column += 1;
1903 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1904 if range.start == range.end && range.end.column > 0 {
1905 range.start.column -= 1;
1906 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1907 }
1908 }
1909
1910 sanitized_diagnostics.push(DiagnosticEntry {
1911 range,
1912 diagnostic: entry.diagnostic,
1913 });
1914 }
1915 drop(edits_since_save);
1916
1917 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1918 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1919 Ok(())
1920 }
1921
1922 pub fn format(
1923 &self,
1924 buffers: HashSet<ModelHandle<Buffer>>,
1925 push_to_history: bool,
1926 cx: &mut ModelContext<Project>,
1927 ) -> Task<Result<ProjectTransaction>> {
1928 let mut local_buffers = Vec::new();
1929 let mut remote_buffers = None;
1930 for buffer_handle in buffers {
1931 let buffer = buffer_handle.read(cx);
1932 if let Some(file) = File::from_dyn(buffer.file()) {
1933 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1934 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1935 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1936 }
1937 } else {
1938 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1939 }
1940 } else {
1941 return Task::ready(Ok(Default::default()));
1942 }
1943 }
1944
1945 let remote_buffers = self.remote_id().zip(remote_buffers);
1946 let client = self.client.clone();
1947
1948 cx.spawn(|this, mut cx| async move {
1949 let mut project_transaction = ProjectTransaction::default();
1950
1951 if let Some((project_id, remote_buffers)) = remote_buffers {
1952 let response = client
1953 .request(proto::FormatBuffers {
1954 project_id,
1955 buffer_ids: remote_buffers
1956 .iter()
1957 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1958 .collect(),
1959 })
1960 .await?
1961 .transaction
1962 .ok_or_else(|| anyhow!("missing transaction"))?;
1963 project_transaction = this
1964 .update(&mut cx, |this, cx| {
1965 this.deserialize_project_transaction(response, push_to_history, cx)
1966 })
1967 .await?;
1968 }
1969
1970 for (buffer, buffer_abs_path, language_server) in local_buffers {
1971 let text_document = lsp::TextDocumentIdentifier::new(
1972 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1973 );
1974 let capabilities = &language_server.capabilities();
1975 let lsp_edits = if capabilities
1976 .document_formatting_provider
1977 .as_ref()
1978 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1979 {
1980 language_server
1981 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1982 text_document,
1983 options: Default::default(),
1984 work_done_progress_params: Default::default(),
1985 })
1986 .await?
1987 } else if capabilities
1988 .document_range_formatting_provider
1989 .as_ref()
1990 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1991 {
1992 let buffer_start = lsp::Position::new(0, 0);
1993 let buffer_end = buffer
1994 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1995 .to_lsp_position();
1996 language_server
1997 .request::<lsp::request::RangeFormatting>(
1998 lsp::DocumentRangeFormattingParams {
1999 text_document,
2000 range: lsp::Range::new(buffer_start, buffer_end),
2001 options: Default::default(),
2002 work_done_progress_params: Default::default(),
2003 },
2004 )
2005 .await?
2006 } else {
2007 continue;
2008 };
2009
2010 if let Some(lsp_edits) = lsp_edits {
2011 let edits = this
2012 .update(&mut cx, |this, cx| {
2013 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2014 })
2015 .await?;
2016 buffer.update(&mut cx, |buffer, cx| {
2017 buffer.finalize_last_transaction();
2018 buffer.start_transaction();
2019 for (range, text) in edits {
2020 buffer.edit([range], text, cx);
2021 }
2022 if buffer.end_transaction(cx).is_some() {
2023 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2024 if !push_to_history {
2025 buffer.forget_transaction(transaction.id);
2026 }
2027 project_transaction.0.insert(cx.handle(), transaction);
2028 }
2029 });
2030 }
2031 }
2032
2033 Ok(project_transaction)
2034 })
2035 }
2036
2037 pub fn definition<T: ToPointUtf16>(
2038 &self,
2039 buffer: &ModelHandle<Buffer>,
2040 position: T,
2041 cx: &mut ModelContext<Self>,
2042 ) -> Task<Result<Vec<Location>>> {
2043 let position = position.to_point_utf16(buffer.read(cx));
2044 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2045 }
2046
2047 pub fn references<T: ToPointUtf16>(
2048 &self,
2049 buffer: &ModelHandle<Buffer>,
2050 position: T,
2051 cx: &mut ModelContext<Self>,
2052 ) -> Task<Result<Vec<Location>>> {
2053 let position = position.to_point_utf16(buffer.read(cx));
2054 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2055 }
2056
2057 pub fn document_highlights<T: ToPointUtf16>(
2058 &self,
2059 buffer: &ModelHandle<Buffer>,
2060 position: T,
2061 cx: &mut ModelContext<Self>,
2062 ) -> Task<Result<Vec<DocumentHighlight>>> {
2063 let position = position.to_point_utf16(buffer.read(cx));
2064
2065 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2066 }
2067
2068 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2069 if self.is_local() {
2070 let mut language_servers = HashMap::default();
2071 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2072 if let Some((worktree, language)) = self
2073 .worktree_for_id(*worktree_id, cx)
2074 .and_then(|worktree| worktree.read(cx).as_local())
2075 .zip(self.languages.get_language(language_name))
2076 {
2077 language_servers
2078 .entry(Arc::as_ptr(language_server))
2079 .or_insert((
2080 language_server.clone(),
2081 *worktree_id,
2082 worktree.abs_path().clone(),
2083 language.clone(),
2084 ));
2085 }
2086 }
2087
2088 let mut requests = Vec::new();
2089 for (language_server, _, _, _) in language_servers.values() {
2090 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2091 lsp::WorkspaceSymbolParams {
2092 query: query.to_string(),
2093 ..Default::default()
2094 },
2095 ));
2096 }
2097
2098 cx.spawn_weak(|this, cx| async move {
2099 let responses = futures::future::try_join_all(requests).await?;
2100
2101 let mut symbols = Vec::new();
2102 if let Some(this) = this.upgrade(&cx) {
2103 this.read_with(&cx, |this, cx| {
2104 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2105 language_servers.into_values().zip(responses)
2106 {
2107 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2108 |lsp_symbol| {
2109 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2110 let mut worktree_id = source_worktree_id;
2111 let path;
2112 if let Some((worktree, rel_path)) =
2113 this.find_local_worktree(&abs_path, cx)
2114 {
2115 worktree_id = worktree.read(cx).id();
2116 path = rel_path;
2117 } else {
2118 path = relativize_path(&worktree_abs_path, &abs_path);
2119 }
2120
2121 let label = language
2122 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2123 .unwrap_or_else(|| {
2124 CodeLabel::plain(lsp_symbol.name.clone(), None)
2125 });
2126 let signature = this.symbol_signature(worktree_id, &path);
2127
2128 Some(Symbol {
2129 source_worktree_id,
2130 worktree_id,
2131 language_name: language.name().to_string(),
2132 name: lsp_symbol.name,
2133 kind: lsp_symbol.kind,
2134 label,
2135 path,
2136 range: range_from_lsp(lsp_symbol.location.range),
2137 signature,
2138 })
2139 },
2140 ));
2141 }
2142 })
2143 }
2144
2145 Ok(symbols)
2146 })
2147 } else if let Some(project_id) = self.remote_id() {
2148 let request = self.client.request(proto::GetProjectSymbols {
2149 project_id,
2150 query: query.to_string(),
2151 });
2152 cx.spawn_weak(|this, cx| async move {
2153 let response = request.await?;
2154 let mut symbols = Vec::new();
2155 if let Some(this) = this.upgrade(&cx) {
2156 this.read_with(&cx, |this, _| {
2157 symbols.extend(
2158 response
2159 .symbols
2160 .into_iter()
2161 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2162 );
2163 })
2164 }
2165 Ok(symbols)
2166 })
2167 } else {
2168 Task::ready(Ok(Default::default()))
2169 }
2170 }
2171
2172 pub fn open_buffer_for_symbol(
2173 &mut self,
2174 symbol: &Symbol,
2175 cx: &mut ModelContext<Self>,
2176 ) -> Task<Result<ModelHandle<Buffer>>> {
2177 if self.is_local() {
2178 let language_server = if let Some(server) = self.language_servers.get(&(
2179 symbol.source_worktree_id,
2180 Arc::from(symbol.language_name.as_str()),
2181 )) {
2182 server.clone()
2183 } else {
2184 return Task::ready(Err(anyhow!(
2185 "language server for worktree and language not found"
2186 )));
2187 };
2188
2189 let worktree_abs_path = if let Some(worktree_abs_path) = self
2190 .worktree_for_id(symbol.worktree_id, cx)
2191 .and_then(|worktree| worktree.read(cx).as_local())
2192 .map(|local_worktree| local_worktree.abs_path())
2193 {
2194 worktree_abs_path
2195 } else {
2196 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2197 };
2198 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2199 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2200 uri
2201 } else {
2202 return Task::ready(Err(anyhow!("invalid symbol path")));
2203 };
2204
2205 self.open_local_buffer_via_lsp(
2206 symbol_uri,
2207 Arc::from(symbol.language_name.as_str()),
2208 language_server,
2209 cx,
2210 )
2211 } else if let Some(project_id) = self.remote_id() {
2212 let request = self.client.request(proto::OpenBufferForSymbol {
2213 project_id,
2214 symbol: Some(serialize_symbol(symbol)),
2215 });
2216 cx.spawn(|this, mut cx| async move {
2217 let response = request.await?;
2218 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2219 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2220 .await
2221 })
2222 } else {
2223 Task::ready(Err(anyhow!("project does not have a remote id")))
2224 }
2225 }
2226
2227 pub fn completions<T: ToPointUtf16>(
2228 &self,
2229 source_buffer_handle: &ModelHandle<Buffer>,
2230 position: T,
2231 cx: &mut ModelContext<Self>,
2232 ) -> Task<Result<Vec<Completion>>> {
2233 let source_buffer_handle = source_buffer_handle.clone();
2234 let source_buffer = source_buffer_handle.read(cx);
2235 let buffer_id = source_buffer.remote_id();
2236 let language = source_buffer.language().cloned();
2237 let worktree;
2238 let buffer_abs_path;
2239 if let Some(file) = File::from_dyn(source_buffer.file()) {
2240 worktree = file.worktree.clone();
2241 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2242 } else {
2243 return Task::ready(Ok(Default::default()));
2244 };
2245
2246 let position = position.to_point_utf16(source_buffer);
2247 let anchor = source_buffer.anchor_after(position);
2248
2249 if worktree.read(cx).as_local().is_some() {
2250 let buffer_abs_path = buffer_abs_path.unwrap();
2251 let lang_server =
2252 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2253 server.clone()
2254 } else {
2255 return Task::ready(Ok(Default::default()));
2256 };
2257
2258 cx.spawn(|_, cx| async move {
2259 let completions = lang_server
2260 .request::<lsp::request::Completion>(lsp::CompletionParams {
2261 text_document_position: lsp::TextDocumentPositionParams::new(
2262 lsp::TextDocumentIdentifier::new(
2263 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2264 ),
2265 position.to_lsp_position(),
2266 ),
2267 context: Default::default(),
2268 work_done_progress_params: Default::default(),
2269 partial_result_params: Default::default(),
2270 })
2271 .await
2272 .context("lsp completion request failed")?;
2273
2274 let completions = if let Some(completions) = completions {
2275 match completions {
2276 lsp::CompletionResponse::Array(completions) => completions,
2277 lsp::CompletionResponse::List(list) => list.items,
2278 }
2279 } else {
2280 Default::default()
2281 };
2282
2283 source_buffer_handle.read_with(&cx, |this, _| {
2284 Ok(completions
2285 .into_iter()
2286 .filter_map(|lsp_completion| {
2287 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2288 lsp::CompletionTextEdit::Edit(edit) => {
2289 (range_from_lsp(edit.range), edit.new_text.clone())
2290 }
2291 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2292 log::info!("unsupported insert/replace completion");
2293 return None;
2294 }
2295 };
2296
2297 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2298 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2299 if clipped_start == old_range.start && clipped_end == old_range.end {
2300 Some(Completion {
2301 old_range: this.anchor_before(old_range.start)
2302 ..this.anchor_after(old_range.end),
2303 new_text,
2304 label: language
2305 .as_ref()
2306 .and_then(|l| l.label_for_completion(&lsp_completion))
2307 .unwrap_or_else(|| {
2308 CodeLabel::plain(
2309 lsp_completion.label.clone(),
2310 lsp_completion.filter_text.as_deref(),
2311 )
2312 }),
2313 lsp_completion,
2314 })
2315 } else {
2316 None
2317 }
2318 })
2319 .collect())
2320 })
2321 })
2322 } else if let Some(project_id) = self.remote_id() {
2323 let rpc = self.client.clone();
2324 let message = proto::GetCompletions {
2325 project_id,
2326 buffer_id,
2327 position: Some(language::proto::serialize_anchor(&anchor)),
2328 version: serialize_version(&source_buffer.version()),
2329 };
2330 cx.spawn_weak(|_, mut cx| async move {
2331 let response = rpc.request(message).await?;
2332
2333 source_buffer_handle
2334 .update(&mut cx, |buffer, _| {
2335 buffer.wait_for_version(deserialize_version(response.version))
2336 })
2337 .await;
2338
2339 response
2340 .completions
2341 .into_iter()
2342 .map(|completion| {
2343 language::proto::deserialize_completion(completion, language.as_ref())
2344 })
2345 .collect()
2346 })
2347 } else {
2348 Task::ready(Ok(Default::default()))
2349 }
2350 }
2351
2352 pub fn apply_additional_edits_for_completion(
2353 &self,
2354 buffer_handle: ModelHandle<Buffer>,
2355 completion: Completion,
2356 push_to_history: bool,
2357 cx: &mut ModelContext<Self>,
2358 ) -> Task<Result<Option<Transaction>>> {
2359 let buffer = buffer_handle.read(cx);
2360 let buffer_id = buffer.remote_id();
2361
2362 if self.is_local() {
2363 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2364 server.clone()
2365 } else {
2366 return Task::ready(Ok(Default::default()));
2367 };
2368
2369 cx.spawn(|this, mut cx| async move {
2370 let resolved_completion = lang_server
2371 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2372 .await?;
2373 if let Some(edits) = resolved_completion.additional_text_edits {
2374 let edits = this
2375 .update(&mut cx, |this, cx| {
2376 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2377 })
2378 .await?;
2379 buffer_handle.update(&mut cx, |buffer, cx| {
2380 buffer.finalize_last_transaction();
2381 buffer.start_transaction();
2382 for (range, text) in edits {
2383 buffer.edit([range], text, cx);
2384 }
2385 let transaction = if buffer.end_transaction(cx).is_some() {
2386 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2387 if !push_to_history {
2388 buffer.forget_transaction(transaction.id);
2389 }
2390 Some(transaction)
2391 } else {
2392 None
2393 };
2394 Ok(transaction)
2395 })
2396 } else {
2397 Ok(None)
2398 }
2399 })
2400 } else if let Some(project_id) = self.remote_id() {
2401 let client = self.client.clone();
2402 cx.spawn(|_, mut cx| async move {
2403 let response = client
2404 .request(proto::ApplyCompletionAdditionalEdits {
2405 project_id,
2406 buffer_id,
2407 completion: Some(language::proto::serialize_completion(&completion)),
2408 })
2409 .await?;
2410
2411 if let Some(transaction) = response.transaction {
2412 let transaction = language::proto::deserialize_transaction(transaction)?;
2413 buffer_handle
2414 .update(&mut cx, |buffer, _| {
2415 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2416 })
2417 .await;
2418 if push_to_history {
2419 buffer_handle.update(&mut cx, |buffer, _| {
2420 buffer.push_transaction(transaction.clone(), Instant::now());
2421 });
2422 }
2423 Ok(Some(transaction))
2424 } else {
2425 Ok(None)
2426 }
2427 })
2428 } else {
2429 Task::ready(Err(anyhow!("project does not have a remote id")))
2430 }
2431 }
2432
2433 pub fn code_actions<T: ToOffset>(
2434 &self,
2435 buffer_handle: &ModelHandle<Buffer>,
2436 range: Range<T>,
2437 cx: &mut ModelContext<Self>,
2438 ) -> Task<Result<Vec<CodeAction>>> {
2439 let buffer_handle = buffer_handle.clone();
2440 let buffer = buffer_handle.read(cx);
2441 let buffer_id = buffer.remote_id();
2442 let worktree;
2443 let buffer_abs_path;
2444 if let Some(file) = File::from_dyn(buffer.file()) {
2445 worktree = file.worktree.clone();
2446 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2447 } else {
2448 return Task::ready(Ok(Default::default()));
2449 };
2450 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2451
2452 if worktree.read(cx).as_local().is_some() {
2453 let buffer_abs_path = buffer_abs_path.unwrap();
2454 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2455 server.clone()
2456 } else {
2457 return Task::ready(Ok(Default::default()));
2458 };
2459
2460 let lsp_range = lsp::Range::new(
2461 range.start.to_point_utf16(buffer).to_lsp_position(),
2462 range.end.to_point_utf16(buffer).to_lsp_position(),
2463 );
2464 cx.foreground().spawn(async move {
2465 if !lang_server.capabilities().code_action_provider.is_some() {
2466 return Ok(Default::default());
2467 }
2468
2469 Ok(lang_server
2470 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2471 text_document: lsp::TextDocumentIdentifier::new(
2472 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2473 ),
2474 range: lsp_range,
2475 work_done_progress_params: Default::default(),
2476 partial_result_params: Default::default(),
2477 context: lsp::CodeActionContext {
2478 diagnostics: Default::default(),
2479 only: Some(vec![
2480 lsp::CodeActionKind::QUICKFIX,
2481 lsp::CodeActionKind::REFACTOR,
2482 lsp::CodeActionKind::REFACTOR_EXTRACT,
2483 ]),
2484 },
2485 })
2486 .await?
2487 .unwrap_or_default()
2488 .into_iter()
2489 .filter_map(|entry| {
2490 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2491 Some(CodeAction {
2492 range: range.clone(),
2493 lsp_action,
2494 })
2495 } else {
2496 None
2497 }
2498 })
2499 .collect())
2500 })
2501 } else if let Some(project_id) = self.remote_id() {
2502 let rpc = self.client.clone();
2503 let version = buffer.version();
2504 cx.spawn_weak(|_, mut cx| async move {
2505 let response = rpc
2506 .request(proto::GetCodeActions {
2507 project_id,
2508 buffer_id,
2509 start: Some(language::proto::serialize_anchor(&range.start)),
2510 end: Some(language::proto::serialize_anchor(&range.end)),
2511 version: serialize_version(&version),
2512 })
2513 .await?;
2514
2515 buffer_handle
2516 .update(&mut cx, |buffer, _| {
2517 buffer.wait_for_version(deserialize_version(response.version))
2518 })
2519 .await;
2520
2521 response
2522 .actions
2523 .into_iter()
2524 .map(language::proto::deserialize_code_action)
2525 .collect()
2526 })
2527 } else {
2528 Task::ready(Ok(Default::default()))
2529 }
2530 }
2531
2532 pub fn apply_code_action(
2533 &self,
2534 buffer_handle: ModelHandle<Buffer>,
2535 mut action: CodeAction,
2536 push_to_history: bool,
2537 cx: &mut ModelContext<Self>,
2538 ) -> Task<Result<ProjectTransaction>> {
2539 if self.is_local() {
2540 let buffer = buffer_handle.read(cx);
2541 let lang_name = if let Some(lang) = buffer.language() {
2542 lang.name()
2543 } else {
2544 return Task::ready(Ok(Default::default()));
2545 };
2546 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2547 server.clone()
2548 } else {
2549 return Task::ready(Ok(Default::default()));
2550 };
2551 let range = action.range.to_point_utf16(buffer);
2552
2553 cx.spawn(|this, mut cx| async move {
2554 if let Some(lsp_range) = action
2555 .lsp_action
2556 .data
2557 .as_mut()
2558 .and_then(|d| d.get_mut("codeActionParams"))
2559 .and_then(|d| d.get_mut("range"))
2560 {
2561 *lsp_range = serde_json::to_value(&lsp::Range::new(
2562 range.start.to_lsp_position(),
2563 range.end.to_lsp_position(),
2564 ))
2565 .unwrap();
2566 action.lsp_action = lang_server
2567 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2568 .await?;
2569 } else {
2570 let actions = this
2571 .update(&mut cx, |this, cx| {
2572 this.code_actions(&buffer_handle, action.range, cx)
2573 })
2574 .await?;
2575 action.lsp_action = actions
2576 .into_iter()
2577 .find(|a| a.lsp_action.title == action.lsp_action.title)
2578 .ok_or_else(|| anyhow!("code action is outdated"))?
2579 .lsp_action;
2580 }
2581
2582 if let Some(edit) = action.lsp_action.edit {
2583 Self::deserialize_workspace_edit(
2584 this,
2585 edit,
2586 push_to_history,
2587 lang_name,
2588 lang_server,
2589 &mut cx,
2590 )
2591 .await
2592 } else {
2593 Ok(ProjectTransaction::default())
2594 }
2595 })
2596 } else if let Some(project_id) = self.remote_id() {
2597 let client = self.client.clone();
2598 let request = proto::ApplyCodeAction {
2599 project_id,
2600 buffer_id: buffer_handle.read(cx).remote_id(),
2601 action: Some(language::proto::serialize_code_action(&action)),
2602 };
2603 cx.spawn(|this, mut cx| async move {
2604 let response = client
2605 .request(request)
2606 .await?
2607 .transaction
2608 .ok_or_else(|| anyhow!("missing transaction"))?;
2609 this.update(&mut cx, |this, cx| {
2610 this.deserialize_project_transaction(response, push_to_history, cx)
2611 })
2612 .await
2613 })
2614 } else {
2615 Task::ready(Err(anyhow!("project does not have a remote id")))
2616 }
2617 }
2618
2619 async fn deserialize_workspace_edit(
2620 this: ModelHandle<Self>,
2621 edit: lsp::WorkspaceEdit,
2622 push_to_history: bool,
2623 language_name: Arc<str>,
2624 language_server: Arc<LanguageServer>,
2625 cx: &mut AsyncAppContext,
2626 ) -> Result<ProjectTransaction> {
2627 let fs = this.read_with(cx, |this, _| this.fs.clone());
2628 let mut operations = Vec::new();
2629 if let Some(document_changes) = edit.document_changes {
2630 match document_changes {
2631 lsp::DocumentChanges::Edits(edits) => {
2632 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2633 }
2634 lsp::DocumentChanges::Operations(ops) => operations = ops,
2635 }
2636 } else if let Some(changes) = edit.changes {
2637 operations.extend(changes.into_iter().map(|(uri, edits)| {
2638 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2639 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2640 uri,
2641 version: None,
2642 },
2643 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2644 })
2645 }));
2646 }
2647
2648 let mut project_transaction = ProjectTransaction::default();
2649 for operation in operations {
2650 match operation {
2651 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2652 let abs_path = op
2653 .uri
2654 .to_file_path()
2655 .map_err(|_| anyhow!("can't convert URI to path"))?;
2656
2657 if let Some(parent_path) = abs_path.parent() {
2658 fs.create_dir(parent_path).await?;
2659 }
2660 if abs_path.ends_with("/") {
2661 fs.create_dir(&abs_path).await?;
2662 } else {
2663 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2664 .await?;
2665 }
2666 }
2667 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2668 let source_abs_path = op
2669 .old_uri
2670 .to_file_path()
2671 .map_err(|_| anyhow!("can't convert URI to path"))?;
2672 let target_abs_path = op
2673 .new_uri
2674 .to_file_path()
2675 .map_err(|_| anyhow!("can't convert URI to path"))?;
2676 fs.rename(
2677 &source_abs_path,
2678 &target_abs_path,
2679 op.options.map(Into::into).unwrap_or_default(),
2680 )
2681 .await?;
2682 }
2683 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2684 let abs_path = op
2685 .uri
2686 .to_file_path()
2687 .map_err(|_| anyhow!("can't convert URI to path"))?;
2688 let options = op.options.map(Into::into).unwrap_or_default();
2689 if abs_path.ends_with("/") {
2690 fs.remove_dir(&abs_path, options).await?;
2691 } else {
2692 fs.remove_file(&abs_path, options).await?;
2693 }
2694 }
2695 lsp::DocumentChangeOperation::Edit(op) => {
2696 let buffer_to_edit = this
2697 .update(cx, |this, cx| {
2698 this.open_local_buffer_via_lsp(
2699 op.text_document.uri,
2700 language_name.clone(),
2701 language_server.clone(),
2702 cx,
2703 )
2704 })
2705 .await?;
2706
2707 let edits = this
2708 .update(cx, |this, cx| {
2709 let edits = op.edits.into_iter().map(|edit| match edit {
2710 lsp::OneOf::Left(edit) => edit,
2711 lsp::OneOf::Right(edit) => edit.text_edit,
2712 });
2713 this.edits_from_lsp(
2714 &buffer_to_edit,
2715 edits,
2716 op.text_document.version,
2717 cx,
2718 )
2719 })
2720 .await?;
2721
2722 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2723 buffer.finalize_last_transaction();
2724 buffer.start_transaction();
2725 for (range, text) in edits {
2726 buffer.edit([range], text, cx);
2727 }
2728 let transaction = if buffer.end_transaction(cx).is_some() {
2729 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2730 if !push_to_history {
2731 buffer.forget_transaction(transaction.id);
2732 }
2733 Some(transaction)
2734 } else {
2735 None
2736 };
2737
2738 transaction
2739 });
2740 if let Some(transaction) = transaction {
2741 project_transaction.0.insert(buffer_to_edit, transaction);
2742 }
2743 }
2744 }
2745 }
2746
2747 Ok(project_transaction)
2748 }
2749
2750 pub fn prepare_rename<T: ToPointUtf16>(
2751 &self,
2752 buffer: ModelHandle<Buffer>,
2753 position: T,
2754 cx: &mut ModelContext<Self>,
2755 ) -> Task<Result<Option<Range<Anchor>>>> {
2756 let position = position.to_point_utf16(buffer.read(cx));
2757 self.request_lsp(buffer, PrepareRename { position }, cx)
2758 }
2759
2760 pub fn perform_rename<T: ToPointUtf16>(
2761 &self,
2762 buffer: ModelHandle<Buffer>,
2763 position: T,
2764 new_name: String,
2765 push_to_history: bool,
2766 cx: &mut ModelContext<Self>,
2767 ) -> Task<Result<ProjectTransaction>> {
2768 let position = position.to_point_utf16(buffer.read(cx));
2769 self.request_lsp(
2770 buffer,
2771 PerformRename {
2772 position,
2773 new_name,
2774 push_to_history,
2775 },
2776 cx,
2777 )
2778 }
2779
2780 pub fn search(
2781 &self,
2782 query: SearchQuery,
2783 cx: &mut ModelContext<Self>,
2784 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2785 if self.is_local() {
2786 let snapshots = self
2787 .visible_worktrees(cx)
2788 .filter_map(|tree| {
2789 let tree = tree.read(cx).as_local()?;
2790 Some(tree.snapshot())
2791 })
2792 .collect::<Vec<_>>();
2793
2794 let background = cx.background().clone();
2795 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2796 if path_count == 0 {
2797 return Task::ready(Ok(Default::default()));
2798 }
2799 let workers = background.num_cpus().min(path_count);
2800 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2801 cx.background()
2802 .spawn({
2803 let fs = self.fs.clone();
2804 let background = cx.background().clone();
2805 let query = query.clone();
2806 async move {
2807 let fs = &fs;
2808 let query = &query;
2809 let matching_paths_tx = &matching_paths_tx;
2810 let paths_per_worker = (path_count + workers - 1) / workers;
2811 let snapshots = &snapshots;
2812 background
2813 .scoped(|scope| {
2814 for worker_ix in 0..workers {
2815 let worker_start_ix = worker_ix * paths_per_worker;
2816 let worker_end_ix = worker_start_ix + paths_per_worker;
2817 scope.spawn(async move {
2818 let mut snapshot_start_ix = 0;
2819 let mut abs_path = PathBuf::new();
2820 for snapshot in snapshots {
2821 let snapshot_end_ix =
2822 snapshot_start_ix + snapshot.visible_file_count();
2823 if worker_end_ix <= snapshot_start_ix {
2824 break;
2825 } else if worker_start_ix > snapshot_end_ix {
2826 snapshot_start_ix = snapshot_end_ix;
2827 continue;
2828 } else {
2829 let start_in_snapshot = worker_start_ix
2830 .saturating_sub(snapshot_start_ix);
2831 let end_in_snapshot =
2832 cmp::min(worker_end_ix, snapshot_end_ix)
2833 - snapshot_start_ix;
2834
2835 for entry in snapshot
2836 .files(false, start_in_snapshot)
2837 .take(end_in_snapshot - start_in_snapshot)
2838 {
2839 if matching_paths_tx.is_closed() {
2840 break;
2841 }
2842
2843 abs_path.clear();
2844 abs_path.push(&snapshot.abs_path());
2845 abs_path.push(&entry.path);
2846 let matches = if let Some(file) =
2847 fs.open_sync(&abs_path).await.log_err()
2848 {
2849 query.detect(file).unwrap_or(false)
2850 } else {
2851 false
2852 };
2853
2854 if matches {
2855 let project_path =
2856 (snapshot.id(), entry.path.clone());
2857 if matching_paths_tx
2858 .send(project_path)
2859 .await
2860 .is_err()
2861 {
2862 break;
2863 }
2864 }
2865 }
2866
2867 snapshot_start_ix = snapshot_end_ix;
2868 }
2869 }
2870 });
2871 }
2872 })
2873 .await;
2874 }
2875 })
2876 .detach();
2877
2878 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2879 let open_buffers = self
2880 .opened_buffers
2881 .values()
2882 .filter_map(|b| b.upgrade(cx))
2883 .collect::<HashSet<_>>();
2884 cx.spawn(|this, cx| async move {
2885 for buffer in &open_buffers {
2886 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2887 buffers_tx.send((buffer.clone(), snapshot)).await?;
2888 }
2889
2890 let open_buffers = Rc::new(RefCell::new(open_buffers));
2891 while let Some(project_path) = matching_paths_rx.next().await {
2892 if buffers_tx.is_closed() {
2893 break;
2894 }
2895
2896 let this = this.clone();
2897 let open_buffers = open_buffers.clone();
2898 let buffers_tx = buffers_tx.clone();
2899 cx.spawn(|mut cx| async move {
2900 if let Some(buffer) = this
2901 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2902 .await
2903 .log_err()
2904 {
2905 if open_buffers.borrow_mut().insert(buffer.clone()) {
2906 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2907 buffers_tx.send((buffer, snapshot)).await?;
2908 }
2909 }
2910
2911 Ok::<_, anyhow::Error>(())
2912 })
2913 .detach();
2914 }
2915
2916 Ok::<_, anyhow::Error>(())
2917 })
2918 .detach_and_log_err(cx);
2919
2920 let background = cx.background().clone();
2921 cx.background().spawn(async move {
2922 let query = &query;
2923 let mut matched_buffers = Vec::new();
2924 for _ in 0..workers {
2925 matched_buffers.push(HashMap::default());
2926 }
2927 background
2928 .scoped(|scope| {
2929 for worker_matched_buffers in matched_buffers.iter_mut() {
2930 let mut buffers_rx = buffers_rx.clone();
2931 scope.spawn(async move {
2932 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2933 let buffer_matches = query
2934 .search(snapshot.as_rope())
2935 .await
2936 .iter()
2937 .map(|range| {
2938 snapshot.anchor_before(range.start)
2939 ..snapshot.anchor_after(range.end)
2940 })
2941 .collect::<Vec<_>>();
2942 if !buffer_matches.is_empty() {
2943 worker_matched_buffers
2944 .insert(buffer.clone(), buffer_matches);
2945 }
2946 }
2947 });
2948 }
2949 })
2950 .await;
2951 Ok(matched_buffers.into_iter().flatten().collect())
2952 })
2953 } else if let Some(project_id) = self.remote_id() {
2954 let request = self.client.request(query.to_proto(project_id));
2955 cx.spawn(|this, mut cx| async move {
2956 let response = request.await?;
2957 let mut result = HashMap::default();
2958 for location in response.locations {
2959 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2960 let target_buffer = this
2961 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2962 .await?;
2963 let start = location
2964 .start
2965 .and_then(deserialize_anchor)
2966 .ok_or_else(|| anyhow!("missing target start"))?;
2967 let end = location
2968 .end
2969 .and_then(deserialize_anchor)
2970 .ok_or_else(|| anyhow!("missing target end"))?;
2971 result
2972 .entry(target_buffer)
2973 .or_insert(Vec::new())
2974 .push(start..end)
2975 }
2976 Ok(result)
2977 })
2978 } else {
2979 Task::ready(Ok(Default::default()))
2980 }
2981 }
2982
2983 fn request_lsp<R: LspCommand>(
2984 &self,
2985 buffer_handle: ModelHandle<Buffer>,
2986 request: R,
2987 cx: &mut ModelContext<Self>,
2988 ) -> Task<Result<R::Response>>
2989 where
2990 <R::LspRequest as lsp::request::Request>::Result: Send,
2991 {
2992 let buffer = buffer_handle.read(cx);
2993 if self.is_local() {
2994 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2995 if let Some((file, language_server)) =
2996 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2997 {
2998 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2999 return cx.spawn(|this, cx| async move {
3000 if !request.check_capabilities(&language_server.capabilities()) {
3001 return Ok(Default::default());
3002 }
3003
3004 let response = language_server
3005 .request::<R::LspRequest>(lsp_params)
3006 .await
3007 .context("lsp request failed")?;
3008 request
3009 .response_from_lsp(response, this, buffer_handle, cx)
3010 .await
3011 });
3012 }
3013 } else if let Some(project_id) = self.remote_id() {
3014 let rpc = self.client.clone();
3015 let message = request.to_proto(project_id, buffer);
3016 return cx.spawn(|this, cx| async move {
3017 let response = rpc.request(message).await?;
3018 request
3019 .response_from_proto(response, this, buffer_handle, cx)
3020 .await
3021 });
3022 }
3023 Task::ready(Ok(Default::default()))
3024 }
3025
3026 pub fn find_or_create_local_worktree(
3027 &mut self,
3028 abs_path: impl AsRef<Path>,
3029 visible: bool,
3030 cx: &mut ModelContext<Self>,
3031 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3032 let abs_path = abs_path.as_ref();
3033 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3034 Task::ready(Ok((tree.clone(), relative_path.into())))
3035 } else {
3036 let worktree = self.create_local_worktree(abs_path, visible, cx);
3037 cx.foreground()
3038 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3039 }
3040 }
3041
3042 pub fn find_local_worktree(
3043 &self,
3044 abs_path: &Path,
3045 cx: &AppContext,
3046 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3047 for tree in self.worktrees(cx) {
3048 if let Some(relative_path) = tree
3049 .read(cx)
3050 .as_local()
3051 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3052 {
3053 return Some((tree.clone(), relative_path.into()));
3054 }
3055 }
3056 None
3057 }
3058
3059 pub fn is_shared(&self) -> bool {
3060 match &self.client_state {
3061 ProjectClientState::Local { is_shared, .. } => *is_shared,
3062 ProjectClientState::Remote { .. } => false,
3063 }
3064 }
3065
3066 fn create_local_worktree(
3067 &mut self,
3068 abs_path: impl AsRef<Path>,
3069 visible: bool,
3070 cx: &mut ModelContext<Self>,
3071 ) -> Task<Result<ModelHandle<Worktree>>> {
3072 let fs = self.fs.clone();
3073 let client = self.client.clone();
3074 let next_entry_id = self.next_entry_id.clone();
3075 let path: Arc<Path> = abs_path.as_ref().into();
3076 let task = self
3077 .loading_local_worktrees
3078 .entry(path.clone())
3079 .or_insert_with(|| {
3080 cx.spawn(|project, mut cx| {
3081 async move {
3082 let worktree = Worktree::local(
3083 client.clone(),
3084 path.clone(),
3085 visible,
3086 fs,
3087 next_entry_id,
3088 &mut cx,
3089 )
3090 .await;
3091 project.update(&mut cx, |project, _| {
3092 project.loading_local_worktrees.remove(&path);
3093 });
3094 let worktree = worktree?;
3095
3096 let (remote_project_id, is_shared) =
3097 project.update(&mut cx, |project, cx| {
3098 project.add_worktree(&worktree, cx);
3099 (project.remote_id(), project.is_shared())
3100 });
3101
3102 if let Some(project_id) = remote_project_id {
3103 if is_shared {
3104 worktree
3105 .update(&mut cx, |worktree, cx| {
3106 worktree.as_local_mut().unwrap().share(project_id, cx)
3107 })
3108 .await?;
3109 } else {
3110 worktree
3111 .update(&mut cx, |worktree, cx| {
3112 worktree.as_local_mut().unwrap().register(project_id, cx)
3113 })
3114 .await?;
3115 }
3116 }
3117
3118 Ok(worktree)
3119 }
3120 .map_err(|err| Arc::new(err))
3121 })
3122 .shared()
3123 })
3124 .clone();
3125 cx.foreground().spawn(async move {
3126 match task.await {
3127 Ok(worktree) => Ok(worktree),
3128 Err(err) => Err(anyhow!("{}", err)),
3129 }
3130 })
3131 }
3132
3133 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3134 self.worktrees.retain(|worktree| {
3135 worktree
3136 .upgrade(cx)
3137 .map_or(false, |w| w.read(cx).id() != id)
3138 });
3139 cx.notify();
3140 }
3141
3142 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3143 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3144 if worktree.read(cx).is_local() {
3145 cx.subscribe(&worktree, |this, worktree, _, cx| {
3146 this.update_local_worktree_buffers(worktree, cx);
3147 })
3148 .detach();
3149 }
3150
3151 let push_strong_handle = {
3152 let worktree = worktree.read(cx);
3153 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3154 };
3155 if push_strong_handle {
3156 self.worktrees
3157 .push(WorktreeHandle::Strong(worktree.clone()));
3158 } else {
3159 cx.observe_release(&worktree, |this, _, cx| {
3160 this.worktrees
3161 .retain(|worktree| worktree.upgrade(cx).is_some());
3162 cx.notify();
3163 })
3164 .detach();
3165 self.worktrees
3166 .push(WorktreeHandle::Weak(worktree.downgrade()));
3167 }
3168 cx.notify();
3169 }
3170
3171 fn update_local_worktree_buffers(
3172 &mut self,
3173 worktree_handle: ModelHandle<Worktree>,
3174 cx: &mut ModelContext<Self>,
3175 ) {
3176 let snapshot = worktree_handle.read(cx).snapshot();
3177 let mut buffers_to_delete = Vec::new();
3178 for (buffer_id, buffer) in &self.opened_buffers {
3179 if let Some(buffer) = buffer.upgrade(cx) {
3180 buffer.update(cx, |buffer, cx| {
3181 if let Some(old_file) = File::from_dyn(buffer.file()) {
3182 if old_file.worktree != worktree_handle {
3183 return;
3184 }
3185
3186 let new_file = if let Some(entry) = old_file
3187 .entry_id
3188 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3189 {
3190 File {
3191 is_local: true,
3192 entry_id: Some(entry.id),
3193 mtime: entry.mtime,
3194 path: entry.path.clone(),
3195 worktree: worktree_handle.clone(),
3196 }
3197 } else if let Some(entry) =
3198 snapshot.entry_for_path(old_file.path().as_ref())
3199 {
3200 File {
3201 is_local: true,
3202 entry_id: Some(entry.id),
3203 mtime: entry.mtime,
3204 path: entry.path.clone(),
3205 worktree: worktree_handle.clone(),
3206 }
3207 } else {
3208 File {
3209 is_local: true,
3210 entry_id: None,
3211 path: old_file.path().clone(),
3212 mtime: old_file.mtime(),
3213 worktree: worktree_handle.clone(),
3214 }
3215 };
3216
3217 if let Some(project_id) = self.remote_id() {
3218 self.client
3219 .send(proto::UpdateBufferFile {
3220 project_id,
3221 buffer_id: *buffer_id as u64,
3222 file: Some(new_file.to_proto()),
3223 })
3224 .log_err();
3225 }
3226 buffer.file_updated(Box::new(new_file), cx).detach();
3227 }
3228 });
3229 } else {
3230 buffers_to_delete.push(*buffer_id);
3231 }
3232 }
3233
3234 for buffer_id in buffers_to_delete {
3235 self.opened_buffers.remove(&buffer_id);
3236 }
3237 }
3238
3239 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3240 let new_active_entry = entry.and_then(|project_path| {
3241 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3242 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3243 Some(entry.id)
3244 });
3245 if new_active_entry != self.active_entry {
3246 self.active_entry = new_active_entry;
3247 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3248 }
3249 }
3250
3251 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3252 self.language_servers_with_diagnostics_running > 0
3253 }
3254
3255 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3256 let mut summary = DiagnosticSummary::default();
3257 for (_, path_summary) in self.diagnostic_summaries(cx) {
3258 summary.error_count += path_summary.error_count;
3259 summary.warning_count += path_summary.warning_count;
3260 summary.info_count += path_summary.info_count;
3261 summary.hint_count += path_summary.hint_count;
3262 }
3263 summary
3264 }
3265
3266 pub fn diagnostic_summaries<'a>(
3267 &'a self,
3268 cx: &'a AppContext,
3269 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3270 self.worktrees(cx).flat_map(move |worktree| {
3271 let worktree = worktree.read(cx);
3272 let worktree_id = worktree.id();
3273 worktree
3274 .diagnostic_summaries()
3275 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3276 })
3277 }
3278
3279 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3280 self.language_servers_with_diagnostics_running += 1;
3281 if self.language_servers_with_diagnostics_running == 1 {
3282 cx.emit(Event::DiskBasedDiagnosticsStarted);
3283 }
3284 }
3285
3286 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3287 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3288 self.language_servers_with_diagnostics_running -= 1;
3289 if self.language_servers_with_diagnostics_running == 0 {
3290 cx.emit(Event::DiskBasedDiagnosticsFinished);
3291 }
3292 }
3293
3294 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3295 self.active_entry
3296 }
3297
3298 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3299 self.worktree_for_id(path.worktree_id, cx)?
3300 .read(cx)
3301 .entry_for_path(&path.path)
3302 .map(|entry| entry.id)
3303 }
3304
3305 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3306 let worktree = self.worktree_for_entry(entry_id, cx)?;
3307 let worktree = worktree.read(cx);
3308 let worktree_id = worktree.id();
3309 let path = worktree.entry_for_id(entry_id)?.path.clone();
3310 Some(ProjectPath { worktree_id, path })
3311 }
3312
3313 // RPC message handlers
3314
3315 async fn handle_unshare_project(
3316 this: ModelHandle<Self>,
3317 _: TypedEnvelope<proto::UnshareProject>,
3318 _: Arc<Client>,
3319 mut cx: AsyncAppContext,
3320 ) -> Result<()> {
3321 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3322 Ok(())
3323 }
3324
3325 async fn handle_add_collaborator(
3326 this: ModelHandle<Self>,
3327 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3328 _: Arc<Client>,
3329 mut cx: AsyncAppContext,
3330 ) -> Result<()> {
3331 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3332 let collaborator = envelope
3333 .payload
3334 .collaborator
3335 .take()
3336 .ok_or_else(|| anyhow!("empty collaborator"))?;
3337
3338 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3339 this.update(&mut cx, |this, cx| {
3340 this.collaborators
3341 .insert(collaborator.peer_id, collaborator);
3342 cx.notify();
3343 });
3344
3345 Ok(())
3346 }
3347
3348 async fn handle_remove_collaborator(
3349 this: ModelHandle<Self>,
3350 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3351 _: Arc<Client>,
3352 mut cx: AsyncAppContext,
3353 ) -> Result<()> {
3354 this.update(&mut cx, |this, cx| {
3355 let peer_id = PeerId(envelope.payload.peer_id);
3356 let replica_id = this
3357 .collaborators
3358 .remove(&peer_id)
3359 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3360 .replica_id;
3361 for (_, buffer) in &this.opened_buffers {
3362 if let Some(buffer) = buffer.upgrade(cx) {
3363 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3364 }
3365 }
3366 cx.emit(Event::CollaboratorLeft(peer_id));
3367 cx.notify();
3368 Ok(())
3369 })
3370 }
3371
3372 async fn handle_register_worktree(
3373 this: ModelHandle<Self>,
3374 envelope: TypedEnvelope<proto::RegisterWorktree>,
3375 client: Arc<Client>,
3376 mut cx: AsyncAppContext,
3377 ) -> Result<()> {
3378 this.update(&mut cx, |this, cx| {
3379 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3380 let replica_id = this.replica_id();
3381 let worktree = proto::Worktree {
3382 id: envelope.payload.worktree_id,
3383 root_name: envelope.payload.root_name,
3384 entries: Default::default(),
3385 diagnostic_summaries: Default::default(),
3386 visible: envelope.payload.visible,
3387 };
3388 let (worktree, load_task) =
3389 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3390 this.add_worktree(&worktree, cx);
3391 load_task.detach();
3392 Ok(())
3393 })
3394 }
3395
3396 async fn handle_unregister_worktree(
3397 this: ModelHandle<Self>,
3398 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3399 _: Arc<Client>,
3400 mut cx: AsyncAppContext,
3401 ) -> Result<()> {
3402 this.update(&mut cx, |this, cx| {
3403 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3404 this.remove_worktree(worktree_id, cx);
3405 Ok(())
3406 })
3407 }
3408
3409 async fn handle_update_worktree(
3410 this: ModelHandle<Self>,
3411 envelope: TypedEnvelope<proto::UpdateWorktree>,
3412 _: Arc<Client>,
3413 mut cx: AsyncAppContext,
3414 ) -> Result<()> {
3415 this.update(&mut cx, |this, cx| {
3416 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3417 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3418 worktree.update(cx, |worktree, _| {
3419 let worktree = worktree.as_remote_mut().unwrap();
3420 worktree.update_from_remote(envelope)
3421 })?;
3422 }
3423 Ok(())
3424 })
3425 }
3426
3427 async fn handle_update_diagnostic_summary(
3428 this: ModelHandle<Self>,
3429 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3430 _: Arc<Client>,
3431 mut cx: AsyncAppContext,
3432 ) -> Result<()> {
3433 this.update(&mut cx, |this, cx| {
3434 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3435 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3436 if let Some(summary) = envelope.payload.summary {
3437 let project_path = ProjectPath {
3438 worktree_id,
3439 path: Path::new(&summary.path).into(),
3440 };
3441 worktree.update(cx, |worktree, _| {
3442 worktree
3443 .as_remote_mut()
3444 .unwrap()
3445 .update_diagnostic_summary(project_path.path.clone(), &summary);
3446 });
3447 cx.emit(Event::DiagnosticsUpdated(project_path));
3448 }
3449 }
3450 Ok(())
3451 })
3452 }
3453
3454 async fn handle_start_language_server(
3455 this: ModelHandle<Self>,
3456 envelope: TypedEnvelope<proto::StartLanguageServer>,
3457 _: Arc<Client>,
3458 mut cx: AsyncAppContext,
3459 ) -> Result<()> {
3460 let server = envelope
3461 .payload
3462 .server
3463 .ok_or_else(|| anyhow!("invalid server"))?;
3464 this.update(&mut cx, |this, cx| {
3465 this.language_server_statuses.insert(
3466 server.id as usize,
3467 LanguageServerStatus {
3468 name: server.name,
3469 pending_work: Default::default(),
3470 pending_diagnostic_updates: 0,
3471 },
3472 );
3473 cx.notify();
3474 });
3475 Ok(())
3476 }
3477
3478 async fn handle_update_language_server(
3479 this: ModelHandle<Self>,
3480 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3481 _: Arc<Client>,
3482 mut cx: AsyncAppContext,
3483 ) -> Result<()> {
3484 let language_server_id = envelope.payload.language_server_id as usize;
3485 match envelope
3486 .payload
3487 .variant
3488 .ok_or_else(|| anyhow!("invalid variant"))?
3489 {
3490 proto::update_language_server::Variant::WorkStart(payload) => {
3491 this.update(&mut cx, |this, cx| {
3492 this.on_lsp_work_start(language_server_id, payload.token, cx);
3493 })
3494 }
3495 proto::update_language_server::Variant::WorkProgress(payload) => {
3496 this.update(&mut cx, |this, cx| {
3497 this.on_lsp_work_progress(
3498 language_server_id,
3499 payload.token,
3500 LanguageServerProgress {
3501 message: payload.message,
3502 percentage: payload.percentage.map(|p| p as usize),
3503 last_update_at: Instant::now(),
3504 },
3505 cx,
3506 );
3507 })
3508 }
3509 proto::update_language_server::Variant::WorkEnd(payload) => {
3510 this.update(&mut cx, |this, cx| {
3511 this.on_lsp_work_end(language_server_id, payload.token, cx);
3512 })
3513 }
3514 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3515 this.update(&mut cx, |this, cx| {
3516 this.disk_based_diagnostics_started(cx);
3517 })
3518 }
3519 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3520 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3521 }
3522 }
3523
3524 Ok(())
3525 }
3526
3527 async fn handle_update_buffer(
3528 this: ModelHandle<Self>,
3529 envelope: TypedEnvelope<proto::UpdateBuffer>,
3530 _: Arc<Client>,
3531 mut cx: AsyncAppContext,
3532 ) -> Result<()> {
3533 this.update(&mut cx, |this, cx| {
3534 let payload = envelope.payload.clone();
3535 let buffer_id = payload.buffer_id;
3536 let ops = payload
3537 .operations
3538 .into_iter()
3539 .map(|op| language::proto::deserialize_operation(op))
3540 .collect::<Result<Vec<_>, _>>()?;
3541 match this.opened_buffers.entry(buffer_id) {
3542 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3543 OpenBuffer::Strong(buffer) => {
3544 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3545 }
3546 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3547 OpenBuffer::Weak(_) => {}
3548 },
3549 hash_map::Entry::Vacant(e) => {
3550 e.insert(OpenBuffer::Loading(ops));
3551 }
3552 }
3553 Ok(())
3554 })
3555 }
3556
3557 async fn handle_update_buffer_file(
3558 this: ModelHandle<Self>,
3559 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3560 _: Arc<Client>,
3561 mut cx: AsyncAppContext,
3562 ) -> Result<()> {
3563 this.update(&mut cx, |this, cx| {
3564 let payload = envelope.payload.clone();
3565 let buffer_id = payload.buffer_id;
3566 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3567 let worktree = this
3568 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3569 .ok_or_else(|| anyhow!("no such worktree"))?;
3570 let file = File::from_proto(file, worktree.clone(), cx)?;
3571 let buffer = this
3572 .opened_buffers
3573 .get_mut(&buffer_id)
3574 .and_then(|b| b.upgrade(cx))
3575 .ok_or_else(|| anyhow!("no such buffer"))?;
3576 buffer.update(cx, |buffer, cx| {
3577 buffer.file_updated(Box::new(file), cx).detach();
3578 });
3579 Ok(())
3580 })
3581 }
3582
3583 async fn handle_save_buffer(
3584 this: ModelHandle<Self>,
3585 envelope: TypedEnvelope<proto::SaveBuffer>,
3586 _: Arc<Client>,
3587 mut cx: AsyncAppContext,
3588 ) -> Result<proto::BufferSaved> {
3589 let buffer_id = envelope.payload.buffer_id;
3590 let requested_version = deserialize_version(envelope.payload.version);
3591
3592 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3593 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3594 let buffer = this
3595 .opened_buffers
3596 .get(&buffer_id)
3597 .map(|buffer| buffer.upgrade(cx).unwrap())
3598 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3599 Ok::<_, anyhow::Error>((project_id, buffer))
3600 })?;
3601 buffer
3602 .update(&mut cx, |buffer, _| {
3603 buffer.wait_for_version(requested_version)
3604 })
3605 .await;
3606
3607 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3608 Ok(proto::BufferSaved {
3609 project_id,
3610 buffer_id,
3611 version: serialize_version(&saved_version),
3612 mtime: Some(mtime.into()),
3613 })
3614 }
3615
3616 async fn handle_format_buffers(
3617 this: ModelHandle<Self>,
3618 envelope: TypedEnvelope<proto::FormatBuffers>,
3619 _: Arc<Client>,
3620 mut cx: AsyncAppContext,
3621 ) -> Result<proto::FormatBuffersResponse> {
3622 let sender_id = envelope.original_sender_id()?;
3623 let format = this.update(&mut cx, |this, cx| {
3624 let mut buffers = HashSet::default();
3625 for buffer_id in &envelope.payload.buffer_ids {
3626 buffers.insert(
3627 this.opened_buffers
3628 .get(buffer_id)
3629 .map(|buffer| buffer.upgrade(cx).unwrap())
3630 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3631 );
3632 }
3633 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3634 })?;
3635
3636 let project_transaction = format.await?;
3637 let project_transaction = this.update(&mut cx, |this, cx| {
3638 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3639 });
3640 Ok(proto::FormatBuffersResponse {
3641 transaction: Some(project_transaction),
3642 })
3643 }
3644
3645 async fn handle_get_completions(
3646 this: ModelHandle<Self>,
3647 envelope: TypedEnvelope<proto::GetCompletions>,
3648 _: Arc<Client>,
3649 mut cx: AsyncAppContext,
3650 ) -> Result<proto::GetCompletionsResponse> {
3651 let position = envelope
3652 .payload
3653 .position
3654 .and_then(language::proto::deserialize_anchor)
3655 .ok_or_else(|| anyhow!("invalid position"))?;
3656 let version = deserialize_version(envelope.payload.version);
3657 let buffer = this.read_with(&cx, |this, cx| {
3658 this.opened_buffers
3659 .get(&envelope.payload.buffer_id)
3660 .map(|buffer| buffer.upgrade(cx).unwrap())
3661 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3662 })?;
3663 buffer
3664 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3665 .await;
3666 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3667 let completions = this
3668 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3669 .await?;
3670
3671 Ok(proto::GetCompletionsResponse {
3672 completions: completions
3673 .iter()
3674 .map(language::proto::serialize_completion)
3675 .collect(),
3676 version: serialize_version(&version),
3677 })
3678 }
3679
3680 async fn handle_apply_additional_edits_for_completion(
3681 this: ModelHandle<Self>,
3682 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3683 _: Arc<Client>,
3684 mut cx: AsyncAppContext,
3685 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3686 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3687 let buffer = this
3688 .opened_buffers
3689 .get(&envelope.payload.buffer_id)
3690 .map(|buffer| buffer.upgrade(cx).unwrap())
3691 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3692 let language = buffer.read(cx).language();
3693 let completion = language::proto::deserialize_completion(
3694 envelope
3695 .payload
3696 .completion
3697 .ok_or_else(|| anyhow!("invalid completion"))?,
3698 language,
3699 )?;
3700 Ok::<_, anyhow::Error>(
3701 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3702 )
3703 })?;
3704
3705 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3706 transaction: apply_additional_edits
3707 .await?
3708 .as_ref()
3709 .map(language::proto::serialize_transaction),
3710 })
3711 }
3712
3713 async fn handle_get_code_actions(
3714 this: ModelHandle<Self>,
3715 envelope: TypedEnvelope<proto::GetCodeActions>,
3716 _: Arc<Client>,
3717 mut cx: AsyncAppContext,
3718 ) -> Result<proto::GetCodeActionsResponse> {
3719 let start = envelope
3720 .payload
3721 .start
3722 .and_then(language::proto::deserialize_anchor)
3723 .ok_or_else(|| anyhow!("invalid start"))?;
3724 let end = envelope
3725 .payload
3726 .end
3727 .and_then(language::proto::deserialize_anchor)
3728 .ok_or_else(|| anyhow!("invalid end"))?;
3729 let buffer = this.update(&mut cx, |this, cx| {
3730 this.opened_buffers
3731 .get(&envelope.payload.buffer_id)
3732 .map(|buffer| buffer.upgrade(cx).unwrap())
3733 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3734 })?;
3735 buffer
3736 .update(&mut cx, |buffer, _| {
3737 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3738 })
3739 .await;
3740
3741 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3742 let code_actions = this.update(&mut cx, |this, cx| {
3743 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3744 })?;
3745
3746 Ok(proto::GetCodeActionsResponse {
3747 actions: code_actions
3748 .await?
3749 .iter()
3750 .map(language::proto::serialize_code_action)
3751 .collect(),
3752 version: serialize_version(&version),
3753 })
3754 }
3755
3756 async fn handle_apply_code_action(
3757 this: ModelHandle<Self>,
3758 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3759 _: Arc<Client>,
3760 mut cx: AsyncAppContext,
3761 ) -> Result<proto::ApplyCodeActionResponse> {
3762 let sender_id = envelope.original_sender_id()?;
3763 let action = language::proto::deserialize_code_action(
3764 envelope
3765 .payload
3766 .action
3767 .ok_or_else(|| anyhow!("invalid action"))?,
3768 )?;
3769 let apply_code_action = this.update(&mut cx, |this, cx| {
3770 let buffer = this
3771 .opened_buffers
3772 .get(&envelope.payload.buffer_id)
3773 .map(|buffer| buffer.upgrade(cx).unwrap())
3774 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3775 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3776 })?;
3777
3778 let project_transaction = apply_code_action.await?;
3779 let project_transaction = this.update(&mut cx, |this, cx| {
3780 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3781 });
3782 Ok(proto::ApplyCodeActionResponse {
3783 transaction: Some(project_transaction),
3784 })
3785 }
3786
3787 async fn handle_lsp_command<T: LspCommand>(
3788 this: ModelHandle<Self>,
3789 envelope: TypedEnvelope<T::ProtoRequest>,
3790 _: Arc<Client>,
3791 mut cx: AsyncAppContext,
3792 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3793 where
3794 <T::LspRequest as lsp::request::Request>::Result: Send,
3795 {
3796 let sender_id = envelope.original_sender_id()?;
3797 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3798 let buffer_handle = this.read_with(&cx, |this, _| {
3799 this.opened_buffers
3800 .get(&buffer_id)
3801 .and_then(|buffer| buffer.upgrade(&cx))
3802 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3803 })?;
3804 let request = T::from_proto(
3805 envelope.payload,
3806 this.clone(),
3807 buffer_handle.clone(),
3808 cx.clone(),
3809 )
3810 .await?;
3811 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3812 let response = this
3813 .update(&mut cx, |this, cx| {
3814 this.request_lsp(buffer_handle, request, cx)
3815 })
3816 .await?;
3817 this.update(&mut cx, |this, cx| {
3818 Ok(T::response_to_proto(
3819 response,
3820 this,
3821 sender_id,
3822 &buffer_version,
3823 cx,
3824 ))
3825 })
3826 }
3827
3828 async fn handle_get_project_symbols(
3829 this: ModelHandle<Self>,
3830 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3831 _: Arc<Client>,
3832 mut cx: AsyncAppContext,
3833 ) -> Result<proto::GetProjectSymbolsResponse> {
3834 let symbols = this
3835 .update(&mut cx, |this, cx| {
3836 this.symbols(&envelope.payload.query, cx)
3837 })
3838 .await?;
3839
3840 Ok(proto::GetProjectSymbolsResponse {
3841 symbols: symbols.iter().map(serialize_symbol).collect(),
3842 })
3843 }
3844
3845 async fn handle_search_project(
3846 this: ModelHandle<Self>,
3847 envelope: TypedEnvelope<proto::SearchProject>,
3848 _: Arc<Client>,
3849 mut cx: AsyncAppContext,
3850 ) -> Result<proto::SearchProjectResponse> {
3851 let peer_id = envelope.original_sender_id()?;
3852 let query = SearchQuery::from_proto(envelope.payload)?;
3853 let result = this
3854 .update(&mut cx, |this, cx| this.search(query, cx))
3855 .await?;
3856
3857 this.update(&mut cx, |this, cx| {
3858 let mut locations = Vec::new();
3859 for (buffer, ranges) in result {
3860 for range in ranges {
3861 let start = serialize_anchor(&range.start);
3862 let end = serialize_anchor(&range.end);
3863 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3864 locations.push(proto::Location {
3865 buffer: Some(buffer),
3866 start: Some(start),
3867 end: Some(end),
3868 });
3869 }
3870 }
3871 Ok(proto::SearchProjectResponse { locations })
3872 })
3873 }
3874
3875 async fn handle_open_buffer_for_symbol(
3876 this: ModelHandle<Self>,
3877 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3878 _: Arc<Client>,
3879 mut cx: AsyncAppContext,
3880 ) -> Result<proto::OpenBufferForSymbolResponse> {
3881 let peer_id = envelope.original_sender_id()?;
3882 let symbol = envelope
3883 .payload
3884 .symbol
3885 .ok_or_else(|| anyhow!("invalid symbol"))?;
3886 let symbol = this.read_with(&cx, |this, _| {
3887 let symbol = this.deserialize_symbol(symbol)?;
3888 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3889 if signature == symbol.signature {
3890 Ok(symbol)
3891 } else {
3892 Err(anyhow!("invalid symbol signature"))
3893 }
3894 })?;
3895 let buffer = this
3896 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3897 .await?;
3898
3899 Ok(proto::OpenBufferForSymbolResponse {
3900 buffer: Some(this.update(&mut cx, |this, cx| {
3901 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3902 })),
3903 })
3904 }
3905
3906 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3907 let mut hasher = Sha256::new();
3908 hasher.update(worktree_id.to_proto().to_be_bytes());
3909 hasher.update(path.to_string_lossy().as_bytes());
3910 hasher.update(self.nonce.to_be_bytes());
3911 hasher.finalize().as_slice().try_into().unwrap()
3912 }
3913
3914 async fn handle_open_buffer_by_id(
3915 this: ModelHandle<Self>,
3916 envelope: TypedEnvelope<proto::OpenBufferById>,
3917 _: Arc<Client>,
3918 mut cx: AsyncAppContext,
3919 ) -> Result<proto::OpenBufferResponse> {
3920 let peer_id = envelope.original_sender_id()?;
3921 let buffer = this
3922 .update(&mut cx, |this, cx| {
3923 this.open_buffer_by_id(envelope.payload.id, cx)
3924 })
3925 .await?;
3926 this.update(&mut cx, |this, cx| {
3927 Ok(proto::OpenBufferResponse {
3928 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3929 })
3930 })
3931 }
3932
3933 async fn handle_open_buffer_by_path(
3934 this: ModelHandle<Self>,
3935 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3936 _: Arc<Client>,
3937 mut cx: AsyncAppContext,
3938 ) -> Result<proto::OpenBufferResponse> {
3939 let peer_id = envelope.original_sender_id()?;
3940 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3941 let open_buffer = this.update(&mut cx, |this, cx| {
3942 this.open_buffer(
3943 ProjectPath {
3944 worktree_id,
3945 path: PathBuf::from(envelope.payload.path).into(),
3946 },
3947 cx,
3948 )
3949 });
3950
3951 let buffer = open_buffer.await?;
3952 this.update(&mut cx, |this, cx| {
3953 Ok(proto::OpenBufferResponse {
3954 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3955 })
3956 })
3957 }
3958
3959 fn serialize_project_transaction_for_peer(
3960 &mut self,
3961 project_transaction: ProjectTransaction,
3962 peer_id: PeerId,
3963 cx: &AppContext,
3964 ) -> proto::ProjectTransaction {
3965 let mut serialized_transaction = proto::ProjectTransaction {
3966 buffers: Default::default(),
3967 transactions: Default::default(),
3968 };
3969 for (buffer, transaction) in project_transaction.0 {
3970 serialized_transaction
3971 .buffers
3972 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3973 serialized_transaction
3974 .transactions
3975 .push(language::proto::serialize_transaction(&transaction));
3976 }
3977 serialized_transaction
3978 }
3979
3980 fn deserialize_project_transaction(
3981 &mut self,
3982 message: proto::ProjectTransaction,
3983 push_to_history: bool,
3984 cx: &mut ModelContext<Self>,
3985 ) -> Task<Result<ProjectTransaction>> {
3986 cx.spawn(|this, mut cx| async move {
3987 let mut project_transaction = ProjectTransaction::default();
3988 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3989 let buffer = this
3990 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3991 .await?;
3992 let transaction = language::proto::deserialize_transaction(transaction)?;
3993 project_transaction.0.insert(buffer, transaction);
3994 }
3995
3996 for (buffer, transaction) in &project_transaction.0 {
3997 buffer
3998 .update(&mut cx, |buffer, _| {
3999 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4000 })
4001 .await;
4002
4003 if push_to_history {
4004 buffer.update(&mut cx, |buffer, _| {
4005 buffer.push_transaction(transaction.clone(), Instant::now());
4006 });
4007 }
4008 }
4009
4010 Ok(project_transaction)
4011 })
4012 }
4013
4014 fn serialize_buffer_for_peer(
4015 &mut self,
4016 buffer: &ModelHandle<Buffer>,
4017 peer_id: PeerId,
4018 cx: &AppContext,
4019 ) -> proto::Buffer {
4020 let buffer_id = buffer.read(cx).remote_id();
4021 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4022 if shared_buffers.insert(buffer_id) {
4023 proto::Buffer {
4024 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4025 }
4026 } else {
4027 proto::Buffer {
4028 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4029 }
4030 }
4031 }
4032
4033 fn deserialize_buffer(
4034 &mut self,
4035 buffer: proto::Buffer,
4036 cx: &mut ModelContext<Self>,
4037 ) -> Task<Result<ModelHandle<Buffer>>> {
4038 let replica_id = self.replica_id();
4039
4040 let opened_buffer_tx = self.opened_buffer.0.clone();
4041 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4042 cx.spawn(|this, mut cx| async move {
4043 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4044 proto::buffer::Variant::Id(id) => {
4045 let buffer = loop {
4046 let buffer = this.read_with(&cx, |this, cx| {
4047 this.opened_buffers
4048 .get(&id)
4049 .and_then(|buffer| buffer.upgrade(cx))
4050 });
4051 if let Some(buffer) = buffer {
4052 break buffer;
4053 }
4054 opened_buffer_rx
4055 .next()
4056 .await
4057 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4058 };
4059 Ok(buffer)
4060 }
4061 proto::buffer::Variant::State(mut buffer) => {
4062 let mut buffer_worktree = None;
4063 let mut buffer_file = None;
4064 if let Some(file) = buffer.file.take() {
4065 this.read_with(&cx, |this, cx| {
4066 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4067 let worktree =
4068 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4069 anyhow!("no worktree found for id {}", file.worktree_id)
4070 })?;
4071 buffer_file =
4072 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4073 as Box<dyn language::File>);
4074 buffer_worktree = Some(worktree);
4075 Ok::<_, anyhow::Error>(())
4076 })?;
4077 }
4078
4079 let buffer = cx.add_model(|cx| {
4080 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4081 });
4082
4083 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4084
4085 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4086 Ok(buffer)
4087 }
4088 }
4089 })
4090 }
4091
4092 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4093 let language = self
4094 .languages
4095 .get_language(&serialized_symbol.language_name);
4096 let start = serialized_symbol
4097 .start
4098 .ok_or_else(|| anyhow!("invalid start"))?;
4099 let end = serialized_symbol
4100 .end
4101 .ok_or_else(|| anyhow!("invalid end"))?;
4102 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4103 Ok(Symbol {
4104 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4105 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4106 language_name: serialized_symbol.language_name.clone(),
4107 label: language
4108 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4109 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4110 name: serialized_symbol.name,
4111 path: PathBuf::from(serialized_symbol.path),
4112 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4113 kind,
4114 signature: serialized_symbol
4115 .signature
4116 .try_into()
4117 .map_err(|_| anyhow!("invalid signature"))?,
4118 })
4119 }
4120
4121 async fn handle_buffer_saved(
4122 this: ModelHandle<Self>,
4123 envelope: TypedEnvelope<proto::BufferSaved>,
4124 _: Arc<Client>,
4125 mut cx: AsyncAppContext,
4126 ) -> Result<()> {
4127 let version = deserialize_version(envelope.payload.version);
4128 let mtime = envelope
4129 .payload
4130 .mtime
4131 .ok_or_else(|| anyhow!("missing mtime"))?
4132 .into();
4133
4134 this.update(&mut cx, |this, cx| {
4135 let buffer = this
4136 .opened_buffers
4137 .get(&envelope.payload.buffer_id)
4138 .and_then(|buffer| buffer.upgrade(cx));
4139 if let Some(buffer) = buffer {
4140 buffer.update(cx, |buffer, cx| {
4141 buffer.did_save(version, mtime, None, cx);
4142 });
4143 }
4144 Ok(())
4145 })
4146 }
4147
4148 async fn handle_buffer_reloaded(
4149 this: ModelHandle<Self>,
4150 envelope: TypedEnvelope<proto::BufferReloaded>,
4151 _: Arc<Client>,
4152 mut cx: AsyncAppContext,
4153 ) -> Result<()> {
4154 let payload = envelope.payload.clone();
4155 let version = deserialize_version(payload.version);
4156 let mtime = payload
4157 .mtime
4158 .ok_or_else(|| anyhow!("missing mtime"))?
4159 .into();
4160 this.update(&mut cx, |this, cx| {
4161 let buffer = this
4162 .opened_buffers
4163 .get(&payload.buffer_id)
4164 .and_then(|buffer| buffer.upgrade(cx));
4165 if let Some(buffer) = buffer {
4166 buffer.update(cx, |buffer, cx| {
4167 buffer.did_reload(version, mtime, cx);
4168 });
4169 }
4170 Ok(())
4171 })
4172 }
4173
4174 pub fn match_paths<'a>(
4175 &self,
4176 query: &'a str,
4177 include_ignored: bool,
4178 smart_case: bool,
4179 max_results: usize,
4180 cancel_flag: &'a AtomicBool,
4181 cx: &AppContext,
4182 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4183 let worktrees = self
4184 .worktrees(cx)
4185 .filter(|worktree| worktree.read(cx).is_visible())
4186 .collect::<Vec<_>>();
4187 let include_root_name = worktrees.len() > 1;
4188 let candidate_sets = worktrees
4189 .into_iter()
4190 .map(|worktree| CandidateSet {
4191 snapshot: worktree.read(cx).snapshot(),
4192 include_ignored,
4193 include_root_name,
4194 })
4195 .collect::<Vec<_>>();
4196
4197 let background = cx.background().clone();
4198 async move {
4199 fuzzy::match_paths(
4200 candidate_sets.as_slice(),
4201 query,
4202 smart_case,
4203 max_results,
4204 cancel_flag,
4205 background,
4206 )
4207 .await
4208 }
4209 }
4210
4211 fn edits_from_lsp(
4212 &mut self,
4213 buffer: &ModelHandle<Buffer>,
4214 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4215 version: Option<i32>,
4216 cx: &mut ModelContext<Self>,
4217 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4218 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4219 cx.background().spawn(async move {
4220 let snapshot = snapshot?;
4221 let mut lsp_edits = lsp_edits
4222 .into_iter()
4223 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4224 .peekable();
4225
4226 let mut edits = Vec::new();
4227 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4228 // Combine any LSP edits that are adjacent.
4229 //
4230 // Also, combine LSP edits that are separated from each other by only
4231 // a newline. This is important because for some code actions,
4232 // Rust-analyzer rewrites the entire buffer via a series of edits that
4233 // are separated by unchanged newline characters.
4234 //
4235 // In order for the diffing logic below to work properly, any edits that
4236 // cancel each other out must be combined into one.
4237 while let Some((next_range, next_text)) = lsp_edits.peek() {
4238 if next_range.start > range.end {
4239 if next_range.start.row > range.end.row + 1
4240 || next_range.start.column > 0
4241 || snapshot.clip_point_utf16(
4242 PointUtf16::new(range.end.row, u32::MAX),
4243 Bias::Left,
4244 ) > range.end
4245 {
4246 break;
4247 }
4248 new_text.push('\n');
4249 }
4250 range.end = next_range.end;
4251 new_text.push_str(&next_text);
4252 lsp_edits.next();
4253 }
4254
4255 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4256 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4257 {
4258 return Err(anyhow!("invalid edits received from language server"));
4259 }
4260
4261 // For multiline edits, perform a diff of the old and new text so that
4262 // we can identify the changes more precisely, preserving the locations
4263 // of any anchors positioned in the unchanged regions.
4264 if range.end.row > range.start.row {
4265 let mut offset = range.start.to_offset(&snapshot);
4266 let old_text = snapshot.text_for_range(range).collect::<String>();
4267
4268 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4269 let mut moved_since_edit = true;
4270 for change in diff.iter_all_changes() {
4271 let tag = change.tag();
4272 let value = change.value();
4273 match tag {
4274 ChangeTag::Equal => {
4275 offset += value.len();
4276 moved_since_edit = true;
4277 }
4278 ChangeTag::Delete => {
4279 let start = snapshot.anchor_after(offset);
4280 let end = snapshot.anchor_before(offset + value.len());
4281 if moved_since_edit {
4282 edits.push((start..end, String::new()));
4283 } else {
4284 edits.last_mut().unwrap().0.end = end;
4285 }
4286 offset += value.len();
4287 moved_since_edit = false;
4288 }
4289 ChangeTag::Insert => {
4290 if moved_since_edit {
4291 let anchor = snapshot.anchor_after(offset);
4292 edits.push((anchor.clone()..anchor, value.to_string()));
4293 } else {
4294 edits.last_mut().unwrap().1.push_str(value);
4295 }
4296 moved_since_edit = false;
4297 }
4298 }
4299 }
4300 } else if range.end == range.start {
4301 let anchor = snapshot.anchor_after(range.start);
4302 edits.push((anchor.clone()..anchor, new_text));
4303 } else {
4304 let edit_start = snapshot.anchor_after(range.start);
4305 let edit_end = snapshot.anchor_before(range.end);
4306 edits.push((edit_start..edit_end, new_text));
4307 }
4308 }
4309
4310 Ok(edits)
4311 })
4312 }
4313
4314 fn buffer_snapshot_for_lsp_version(
4315 &mut self,
4316 buffer: &ModelHandle<Buffer>,
4317 version: Option<i32>,
4318 cx: &AppContext,
4319 ) -> Result<TextBufferSnapshot> {
4320 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4321
4322 if let Some(version) = version {
4323 let buffer_id = buffer.read(cx).remote_id();
4324 let snapshots = self
4325 .buffer_snapshots
4326 .get_mut(&buffer_id)
4327 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4328 let mut found_snapshot = None;
4329 snapshots.retain(|(snapshot_version, snapshot)| {
4330 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4331 false
4332 } else {
4333 if *snapshot_version == version {
4334 found_snapshot = Some(snapshot.clone());
4335 }
4336 true
4337 }
4338 });
4339
4340 found_snapshot.ok_or_else(|| {
4341 anyhow!(
4342 "snapshot not found for buffer {} at version {}",
4343 buffer_id,
4344 version
4345 )
4346 })
4347 } else {
4348 Ok((buffer.read(cx)).text_snapshot())
4349 }
4350 }
4351
4352 fn language_server_for_buffer(
4353 &self,
4354 buffer: &Buffer,
4355 cx: &AppContext,
4356 ) -> Option<&Arc<LanguageServer>> {
4357 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4358 let worktree_id = file.worktree_id(cx);
4359 self.language_servers.get(&(worktree_id, language.name()))
4360 } else {
4361 None
4362 }
4363 }
4364}
4365
4366impl WorktreeHandle {
4367 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4368 match self {
4369 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4370 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4371 }
4372 }
4373}
4374
4375impl OpenBuffer {
4376 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4377 match self {
4378 OpenBuffer::Strong(handle) => Some(handle.clone()),
4379 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4380 OpenBuffer::Loading(_) => None,
4381 }
4382 }
4383}
4384
4385struct CandidateSet {
4386 snapshot: Snapshot,
4387 include_ignored: bool,
4388 include_root_name: bool,
4389}
4390
4391impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4392 type Candidates = CandidateSetIter<'a>;
4393
4394 fn id(&self) -> usize {
4395 self.snapshot.id().to_usize()
4396 }
4397
4398 fn len(&self) -> usize {
4399 if self.include_ignored {
4400 self.snapshot.file_count()
4401 } else {
4402 self.snapshot.visible_file_count()
4403 }
4404 }
4405
4406 fn prefix(&self) -> Arc<str> {
4407 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4408 self.snapshot.root_name().into()
4409 } else if self.include_root_name {
4410 format!("{}/", self.snapshot.root_name()).into()
4411 } else {
4412 "".into()
4413 }
4414 }
4415
4416 fn candidates(&'a self, start: usize) -> Self::Candidates {
4417 CandidateSetIter {
4418 traversal: self.snapshot.files(self.include_ignored, start),
4419 }
4420 }
4421}
4422
4423struct CandidateSetIter<'a> {
4424 traversal: Traversal<'a>,
4425}
4426
4427impl<'a> Iterator for CandidateSetIter<'a> {
4428 type Item = PathMatchCandidate<'a>;
4429
4430 fn next(&mut self) -> Option<Self::Item> {
4431 self.traversal.next().map(|entry| {
4432 if let EntryKind::File(char_bag) = entry.kind {
4433 PathMatchCandidate {
4434 path: &entry.path,
4435 char_bag,
4436 }
4437 } else {
4438 unreachable!()
4439 }
4440 })
4441 }
4442}
4443
4444impl Entity for Project {
4445 type Event = Event;
4446
4447 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4448 match &self.client_state {
4449 ProjectClientState::Local { remote_id_rx, .. } => {
4450 if let Some(project_id) = *remote_id_rx.borrow() {
4451 self.client
4452 .send(proto::UnregisterProject { project_id })
4453 .log_err();
4454 }
4455 }
4456 ProjectClientState::Remote { remote_id, .. } => {
4457 self.client
4458 .send(proto::LeaveProject {
4459 project_id: *remote_id,
4460 })
4461 .log_err();
4462 }
4463 }
4464 }
4465
4466 fn app_will_quit(
4467 &mut self,
4468 _: &mut MutableAppContext,
4469 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4470 let shutdown_futures = self
4471 .language_servers
4472 .drain()
4473 .filter_map(|(_, server)| server.shutdown())
4474 .collect::<Vec<_>>();
4475 Some(
4476 async move {
4477 futures::future::join_all(shutdown_futures).await;
4478 }
4479 .boxed(),
4480 )
4481 }
4482}
4483
4484impl Collaborator {
4485 fn from_proto(
4486 message: proto::Collaborator,
4487 user_store: &ModelHandle<UserStore>,
4488 cx: &mut AsyncAppContext,
4489 ) -> impl Future<Output = Result<Self>> {
4490 let user = user_store.update(cx, |user_store, cx| {
4491 user_store.fetch_user(message.user_id, cx)
4492 });
4493
4494 async move {
4495 Ok(Self {
4496 peer_id: PeerId(message.peer_id),
4497 user: user.await?,
4498 replica_id: message.replica_id as ReplicaId,
4499 })
4500 }
4501 }
4502}
4503
4504impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4505 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4506 Self {
4507 worktree_id,
4508 path: path.as_ref().into(),
4509 }
4510 }
4511}
4512
4513impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4514 fn from(options: lsp::CreateFileOptions) -> Self {
4515 Self {
4516 overwrite: options.overwrite.unwrap_or(false),
4517 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4518 }
4519 }
4520}
4521
4522impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4523 fn from(options: lsp::RenameFileOptions) -> Self {
4524 Self {
4525 overwrite: options.overwrite.unwrap_or(false),
4526 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4527 }
4528 }
4529}
4530
4531impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4532 fn from(options: lsp::DeleteFileOptions) -> Self {
4533 Self {
4534 recursive: options.recursive.unwrap_or(false),
4535 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4536 }
4537 }
4538}
4539
4540fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4541 proto::Symbol {
4542 source_worktree_id: symbol.source_worktree_id.to_proto(),
4543 worktree_id: symbol.worktree_id.to_proto(),
4544 language_name: symbol.language_name.clone(),
4545 name: symbol.name.clone(),
4546 kind: unsafe { mem::transmute(symbol.kind) },
4547 path: symbol.path.to_string_lossy().to_string(),
4548 start: Some(proto::Point {
4549 row: symbol.range.start.row,
4550 column: symbol.range.start.column,
4551 }),
4552 end: Some(proto::Point {
4553 row: symbol.range.end.row,
4554 column: symbol.range.end.column,
4555 }),
4556 signature: symbol.signature.to_vec(),
4557 }
4558}
4559
4560fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4561 let mut path_components = path.components();
4562 let mut base_components = base.components();
4563 let mut components: Vec<Component> = Vec::new();
4564 loop {
4565 match (path_components.next(), base_components.next()) {
4566 (None, None) => break,
4567 (Some(a), None) => {
4568 components.push(a);
4569 components.extend(path_components.by_ref());
4570 break;
4571 }
4572 (None, _) => components.push(Component::ParentDir),
4573 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4574 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4575 (Some(a), Some(_)) => {
4576 components.push(Component::ParentDir);
4577 for _ in base_components {
4578 components.push(Component::ParentDir);
4579 }
4580 components.push(a);
4581 components.extend(path_components.by_ref());
4582 break;
4583 }
4584 }
4585 }
4586 components.iter().map(|c| c.as_os_str()).collect()
4587}
4588
4589impl Item for Buffer {
4590 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4591 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4592 }
4593}
4594
4595#[cfg(test)]
4596mod tests {
4597 use super::{Event, *};
4598 use fs::RealFs;
4599 use futures::StreamExt;
4600 use gpui::test::subscribe;
4601 use language::{
4602 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4603 ToPoint,
4604 };
4605 use lsp::Url;
4606 use serde_json::json;
4607 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4608 use unindent::Unindent as _;
4609 use util::test::temp_tree;
4610 use worktree::WorktreeHandle as _;
4611
4612 #[gpui::test]
4613 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4614 let dir = temp_tree(json!({
4615 "root": {
4616 "apple": "",
4617 "banana": {
4618 "carrot": {
4619 "date": "",
4620 "endive": "",
4621 }
4622 },
4623 "fennel": {
4624 "grape": "",
4625 }
4626 }
4627 }));
4628
4629 let root_link_path = dir.path().join("root_link");
4630 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4631 unix::fs::symlink(
4632 &dir.path().join("root/fennel"),
4633 &dir.path().join("root/finnochio"),
4634 )
4635 .unwrap();
4636
4637 let project = Project::test(Arc::new(RealFs), cx);
4638
4639 let (tree, _) = project
4640 .update(cx, |project, cx| {
4641 project.find_or_create_local_worktree(&root_link_path, true, cx)
4642 })
4643 .await
4644 .unwrap();
4645
4646 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4647 .await;
4648 cx.read(|cx| {
4649 let tree = tree.read(cx);
4650 assert_eq!(tree.file_count(), 5);
4651 assert_eq!(
4652 tree.inode_for_path("fennel/grape"),
4653 tree.inode_for_path("finnochio/grape")
4654 );
4655 });
4656
4657 let cancel_flag = Default::default();
4658 let results = project
4659 .read_with(cx, |project, cx| {
4660 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4661 })
4662 .await;
4663 assert_eq!(
4664 results
4665 .into_iter()
4666 .map(|result| result.path)
4667 .collect::<Vec<Arc<Path>>>(),
4668 vec![
4669 PathBuf::from("banana/carrot/date").into(),
4670 PathBuf::from("banana/carrot/endive").into(),
4671 ]
4672 );
4673 }
4674
4675 #[gpui::test]
4676 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4677 cx.foreground().forbid_parking();
4678
4679 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4680 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4681 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4682 completion_provider: Some(lsp::CompletionOptions {
4683 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4684 ..Default::default()
4685 }),
4686 ..Default::default()
4687 });
4688 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4689 completion_provider: Some(lsp::CompletionOptions {
4690 trigger_characters: Some(vec![":".to_string()]),
4691 ..Default::default()
4692 }),
4693 ..Default::default()
4694 });
4695
4696 let rust_language = Arc::new(Language::new(
4697 LanguageConfig {
4698 name: "Rust".into(),
4699 path_suffixes: vec!["rs".to_string()],
4700 language_server: Some(rust_lsp_config),
4701 ..Default::default()
4702 },
4703 Some(tree_sitter_rust::language()),
4704 ));
4705 let json_language = Arc::new(Language::new(
4706 LanguageConfig {
4707 name: "JSON".into(),
4708 path_suffixes: vec!["json".to_string()],
4709 language_server: Some(json_lsp_config),
4710 ..Default::default()
4711 },
4712 None,
4713 ));
4714
4715 let fs = FakeFs::new(cx.background());
4716 fs.insert_tree(
4717 "/the-root",
4718 json!({
4719 "test.rs": "const A: i32 = 1;",
4720 "test2.rs": "",
4721 "Cargo.toml": "a = 1",
4722 "package.json": "{\"a\": 1}",
4723 }),
4724 )
4725 .await;
4726
4727 let project = Project::test(fs, cx);
4728 project.update(cx, |project, _| {
4729 project.languages.add(rust_language);
4730 project.languages.add(json_language);
4731 });
4732
4733 let worktree_id = project
4734 .update(cx, |project, cx| {
4735 project.find_or_create_local_worktree("/the-root", true, cx)
4736 })
4737 .await
4738 .unwrap()
4739 .0
4740 .read_with(cx, |tree, _| tree.id());
4741
4742 // Open a buffer without an associated language server.
4743 let toml_buffer = project
4744 .update(cx, |project, cx| {
4745 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4746 })
4747 .await
4748 .unwrap();
4749
4750 // Open a buffer with an associated language server.
4751 let rust_buffer = project
4752 .update(cx, |project, cx| {
4753 project.open_buffer((worktree_id, "test.rs"), cx)
4754 })
4755 .await
4756 .unwrap();
4757
4758 // A server is started up, and it is notified about Rust files.
4759 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4760 assert_eq!(
4761 fake_rust_server
4762 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4763 .await
4764 .text_document,
4765 lsp::TextDocumentItem {
4766 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4767 version: 0,
4768 text: "const A: i32 = 1;".to_string(),
4769 language_id: Default::default()
4770 }
4771 );
4772
4773 // The buffer is configured based on the language server's capabilities.
4774 rust_buffer.read_with(cx, |buffer, _| {
4775 assert_eq!(
4776 buffer.completion_triggers(),
4777 &[".".to_string(), "::".to_string()]
4778 );
4779 });
4780 toml_buffer.read_with(cx, |buffer, _| {
4781 assert!(buffer.completion_triggers().is_empty());
4782 });
4783
4784 // Edit a buffer. The changes are reported to the language server.
4785 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4786 assert_eq!(
4787 fake_rust_server
4788 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4789 .await
4790 .text_document,
4791 lsp::VersionedTextDocumentIdentifier::new(
4792 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4793 1
4794 )
4795 );
4796
4797 // Open a third buffer with a different associated language server.
4798 let json_buffer = project
4799 .update(cx, |project, cx| {
4800 project.open_buffer((worktree_id, "package.json"), cx)
4801 })
4802 .await
4803 .unwrap();
4804
4805 // Another language server is started up, and it is notified about
4806 // all three open buffers.
4807 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4808 assert_eq!(
4809 fake_json_server
4810 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4811 .await
4812 .text_document,
4813 lsp::TextDocumentItem {
4814 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4815 version: 0,
4816 text: "{\"a\": 1}".to_string(),
4817 language_id: Default::default()
4818 }
4819 );
4820
4821 // This buffer is configured based on the second language server's
4822 // capabilities.
4823 json_buffer.read_with(cx, |buffer, _| {
4824 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4825 });
4826
4827 // When opening another buffer whose language server is already running,
4828 // it is also configured based on the existing language server's capabilities.
4829 let rust_buffer2 = project
4830 .update(cx, |project, cx| {
4831 project.open_buffer((worktree_id, "test2.rs"), cx)
4832 })
4833 .await
4834 .unwrap();
4835 rust_buffer2.read_with(cx, |buffer, _| {
4836 assert_eq!(
4837 buffer.completion_triggers(),
4838 &[".".to_string(), "::".to_string()]
4839 );
4840 });
4841
4842 // Changes are reported only to servers matching the buffer's language.
4843 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4844 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4845 assert_eq!(
4846 fake_rust_server
4847 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4848 .await
4849 .text_document,
4850 lsp::VersionedTextDocumentIdentifier::new(
4851 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4852 1
4853 )
4854 );
4855
4856 // Save notifications are reported to all servers.
4857 toml_buffer
4858 .update(cx, |buffer, cx| buffer.save(cx))
4859 .await
4860 .unwrap();
4861 assert_eq!(
4862 fake_rust_server
4863 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4864 .await
4865 .text_document,
4866 lsp::TextDocumentIdentifier::new(
4867 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4868 )
4869 );
4870 assert_eq!(
4871 fake_json_server
4872 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4873 .await
4874 .text_document,
4875 lsp::TextDocumentIdentifier::new(
4876 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4877 )
4878 );
4879
4880 // Close notifications are reported only to servers matching the buffer's language.
4881 cx.update(|_| drop(json_buffer));
4882 let close_message = lsp::DidCloseTextDocumentParams {
4883 text_document: lsp::TextDocumentIdentifier::new(
4884 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4885 ),
4886 };
4887 assert_eq!(
4888 fake_json_server
4889 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4890 .await,
4891 close_message,
4892 );
4893 }
4894
4895 #[gpui::test]
4896 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4897 cx.foreground().forbid_parking();
4898
4899 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4900 let progress_token = language_server_config
4901 .disk_based_diagnostics_progress_token
4902 .clone()
4903 .unwrap();
4904
4905 let language = Arc::new(Language::new(
4906 LanguageConfig {
4907 name: "Rust".into(),
4908 path_suffixes: vec!["rs".to_string()],
4909 language_server: Some(language_server_config),
4910 ..Default::default()
4911 },
4912 Some(tree_sitter_rust::language()),
4913 ));
4914
4915 let fs = FakeFs::new(cx.background());
4916 fs.insert_tree(
4917 "/dir",
4918 json!({
4919 "a.rs": "fn a() { A }",
4920 "b.rs": "const y: i32 = 1",
4921 }),
4922 )
4923 .await;
4924
4925 let project = Project::test(fs, cx);
4926 project.update(cx, |project, _| project.languages.add(language));
4927
4928 let (tree, _) = project
4929 .update(cx, |project, cx| {
4930 project.find_or_create_local_worktree("/dir", true, cx)
4931 })
4932 .await
4933 .unwrap();
4934 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4935
4936 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4937 .await;
4938
4939 // Cause worktree to start the fake language server
4940 let _buffer = project
4941 .update(cx, |project, cx| {
4942 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4943 })
4944 .await
4945 .unwrap();
4946
4947 let mut events = subscribe(&project, cx);
4948
4949 let mut fake_server = fake_servers.next().await.unwrap();
4950 fake_server.start_progress(&progress_token).await;
4951 assert_eq!(
4952 events.next().await.unwrap(),
4953 Event::DiskBasedDiagnosticsStarted
4954 );
4955
4956 fake_server.start_progress(&progress_token).await;
4957 fake_server.end_progress(&progress_token).await;
4958 fake_server.start_progress(&progress_token).await;
4959
4960 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4961 lsp::PublishDiagnosticsParams {
4962 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4963 version: None,
4964 diagnostics: vec![lsp::Diagnostic {
4965 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4966 severity: Some(lsp::DiagnosticSeverity::ERROR),
4967 message: "undefined variable 'A'".to_string(),
4968 ..Default::default()
4969 }],
4970 },
4971 );
4972 assert_eq!(
4973 events.next().await.unwrap(),
4974 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4975 );
4976
4977 fake_server.end_progress(&progress_token).await;
4978 fake_server.end_progress(&progress_token).await;
4979 assert_eq!(
4980 events.next().await.unwrap(),
4981 Event::DiskBasedDiagnosticsUpdated
4982 );
4983 assert_eq!(
4984 events.next().await.unwrap(),
4985 Event::DiskBasedDiagnosticsFinished
4986 );
4987
4988 let buffer = project
4989 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4990 .await
4991 .unwrap();
4992
4993 buffer.read_with(cx, |buffer, _| {
4994 let snapshot = buffer.snapshot();
4995 let diagnostics = snapshot
4996 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4997 .collect::<Vec<_>>();
4998 assert_eq!(
4999 diagnostics,
5000 &[DiagnosticEntry {
5001 range: Point::new(0, 9)..Point::new(0, 10),
5002 diagnostic: Diagnostic {
5003 severity: lsp::DiagnosticSeverity::ERROR,
5004 message: "undefined variable 'A'".to_string(),
5005 group_id: 0,
5006 is_primary: true,
5007 ..Default::default()
5008 }
5009 }]
5010 )
5011 });
5012 }
5013
5014 #[gpui::test]
5015 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5016 cx.foreground().forbid_parking();
5017
5018 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5019 lsp_config
5020 .disk_based_diagnostic_sources
5021 .insert("disk".to_string());
5022 let language = Arc::new(Language::new(
5023 LanguageConfig {
5024 name: "Rust".into(),
5025 path_suffixes: vec!["rs".to_string()],
5026 language_server: Some(lsp_config),
5027 ..Default::default()
5028 },
5029 Some(tree_sitter_rust::language()),
5030 ));
5031
5032 let text = "
5033 fn a() { A }
5034 fn b() { BB }
5035 fn c() { CCC }
5036 "
5037 .unindent();
5038
5039 let fs = FakeFs::new(cx.background());
5040 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5041
5042 let project = Project::test(fs, cx);
5043 project.update(cx, |project, _| project.languages.add(language));
5044
5045 let worktree_id = project
5046 .update(cx, |project, cx| {
5047 project.find_or_create_local_worktree("/dir", true, cx)
5048 })
5049 .await
5050 .unwrap()
5051 .0
5052 .read_with(cx, |tree, _| tree.id());
5053
5054 let buffer = project
5055 .update(cx, |project, cx| {
5056 project.open_buffer((worktree_id, "a.rs"), cx)
5057 })
5058 .await
5059 .unwrap();
5060
5061 let mut fake_server = fake_servers.next().await.unwrap();
5062 let open_notification = fake_server
5063 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5064 .await;
5065
5066 // Edit the buffer, moving the content down
5067 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5068 let change_notification_1 = fake_server
5069 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5070 .await;
5071 assert!(
5072 change_notification_1.text_document.version > open_notification.text_document.version
5073 );
5074
5075 // Report some diagnostics for the initial version of the buffer
5076 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5077 lsp::PublishDiagnosticsParams {
5078 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5079 version: Some(open_notification.text_document.version),
5080 diagnostics: vec![
5081 lsp::Diagnostic {
5082 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5083 severity: Some(DiagnosticSeverity::ERROR),
5084 message: "undefined variable 'A'".to_string(),
5085 source: Some("disk".to_string()),
5086 ..Default::default()
5087 },
5088 lsp::Diagnostic {
5089 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5090 severity: Some(DiagnosticSeverity::ERROR),
5091 message: "undefined variable 'BB'".to_string(),
5092 source: Some("disk".to_string()),
5093 ..Default::default()
5094 },
5095 lsp::Diagnostic {
5096 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5097 severity: Some(DiagnosticSeverity::ERROR),
5098 source: Some("disk".to_string()),
5099 message: "undefined variable 'CCC'".to_string(),
5100 ..Default::default()
5101 },
5102 ],
5103 },
5104 );
5105
5106 // The diagnostics have moved down since they were created.
5107 buffer.next_notification(cx).await;
5108 buffer.read_with(cx, |buffer, _| {
5109 assert_eq!(
5110 buffer
5111 .snapshot()
5112 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5113 .collect::<Vec<_>>(),
5114 &[
5115 DiagnosticEntry {
5116 range: Point::new(3, 9)..Point::new(3, 11),
5117 diagnostic: Diagnostic {
5118 severity: DiagnosticSeverity::ERROR,
5119 message: "undefined variable 'BB'".to_string(),
5120 is_disk_based: true,
5121 group_id: 1,
5122 is_primary: true,
5123 ..Default::default()
5124 },
5125 },
5126 DiagnosticEntry {
5127 range: Point::new(4, 9)..Point::new(4, 12),
5128 diagnostic: Diagnostic {
5129 severity: DiagnosticSeverity::ERROR,
5130 message: "undefined variable 'CCC'".to_string(),
5131 is_disk_based: true,
5132 group_id: 2,
5133 is_primary: true,
5134 ..Default::default()
5135 }
5136 }
5137 ]
5138 );
5139 assert_eq!(
5140 chunks_with_diagnostics(buffer, 0..buffer.len()),
5141 [
5142 ("\n\nfn a() { ".to_string(), None),
5143 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5144 (" }\nfn b() { ".to_string(), None),
5145 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5146 (" }\nfn c() { ".to_string(), None),
5147 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5148 (" }\n".to_string(), None),
5149 ]
5150 );
5151 assert_eq!(
5152 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5153 [
5154 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5155 (" }\nfn c() { ".to_string(), None),
5156 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5157 ]
5158 );
5159 });
5160
5161 // Ensure overlapping diagnostics are highlighted correctly.
5162 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5163 lsp::PublishDiagnosticsParams {
5164 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5165 version: Some(open_notification.text_document.version),
5166 diagnostics: vec![
5167 lsp::Diagnostic {
5168 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5169 severity: Some(DiagnosticSeverity::ERROR),
5170 message: "undefined variable 'A'".to_string(),
5171 source: Some("disk".to_string()),
5172 ..Default::default()
5173 },
5174 lsp::Diagnostic {
5175 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5176 severity: Some(DiagnosticSeverity::WARNING),
5177 message: "unreachable statement".to_string(),
5178 source: Some("disk".to_string()),
5179 ..Default::default()
5180 },
5181 ],
5182 },
5183 );
5184
5185 buffer.next_notification(cx).await;
5186 buffer.read_with(cx, |buffer, _| {
5187 assert_eq!(
5188 buffer
5189 .snapshot()
5190 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5191 .collect::<Vec<_>>(),
5192 &[
5193 DiagnosticEntry {
5194 range: Point::new(2, 9)..Point::new(2, 12),
5195 diagnostic: Diagnostic {
5196 severity: DiagnosticSeverity::WARNING,
5197 message: "unreachable statement".to_string(),
5198 is_disk_based: true,
5199 group_id: 1,
5200 is_primary: true,
5201 ..Default::default()
5202 }
5203 },
5204 DiagnosticEntry {
5205 range: Point::new(2, 9)..Point::new(2, 10),
5206 diagnostic: Diagnostic {
5207 severity: DiagnosticSeverity::ERROR,
5208 message: "undefined variable 'A'".to_string(),
5209 is_disk_based: true,
5210 group_id: 0,
5211 is_primary: true,
5212 ..Default::default()
5213 },
5214 }
5215 ]
5216 );
5217 assert_eq!(
5218 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5219 [
5220 ("fn a() { ".to_string(), None),
5221 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5222 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5223 ("\n".to_string(), None),
5224 ]
5225 );
5226 assert_eq!(
5227 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5228 [
5229 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5230 ("\n".to_string(), None),
5231 ]
5232 );
5233 });
5234
5235 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5236 // changes since the last save.
5237 buffer.update(cx, |buffer, cx| {
5238 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5239 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5240 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5241 });
5242 let change_notification_2 = fake_server
5243 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5244 .await;
5245 assert!(
5246 change_notification_2.text_document.version
5247 > change_notification_1.text_document.version
5248 );
5249
5250 // Handle out-of-order diagnostics
5251 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5252 lsp::PublishDiagnosticsParams {
5253 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5254 version: Some(change_notification_2.text_document.version),
5255 diagnostics: vec![
5256 lsp::Diagnostic {
5257 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5258 severity: Some(DiagnosticSeverity::ERROR),
5259 message: "undefined variable 'BB'".to_string(),
5260 source: Some("disk".to_string()),
5261 ..Default::default()
5262 },
5263 lsp::Diagnostic {
5264 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5265 severity: Some(DiagnosticSeverity::WARNING),
5266 message: "undefined variable 'A'".to_string(),
5267 source: Some("disk".to_string()),
5268 ..Default::default()
5269 },
5270 ],
5271 },
5272 );
5273
5274 buffer.next_notification(cx).await;
5275 buffer.read_with(cx, |buffer, _| {
5276 assert_eq!(
5277 buffer
5278 .snapshot()
5279 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5280 .collect::<Vec<_>>(),
5281 &[
5282 DiagnosticEntry {
5283 range: Point::new(2, 21)..Point::new(2, 22),
5284 diagnostic: Diagnostic {
5285 severity: DiagnosticSeverity::WARNING,
5286 message: "undefined variable 'A'".to_string(),
5287 is_disk_based: true,
5288 group_id: 1,
5289 is_primary: true,
5290 ..Default::default()
5291 }
5292 },
5293 DiagnosticEntry {
5294 range: Point::new(3, 9)..Point::new(3, 14),
5295 diagnostic: Diagnostic {
5296 severity: DiagnosticSeverity::ERROR,
5297 message: "undefined variable 'BB'".to_string(),
5298 is_disk_based: true,
5299 group_id: 0,
5300 is_primary: true,
5301 ..Default::default()
5302 },
5303 }
5304 ]
5305 );
5306 });
5307 }
5308
5309 #[gpui::test]
5310 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5311 cx.foreground().forbid_parking();
5312
5313 let text = concat!(
5314 "let one = ;\n", //
5315 "let two = \n",
5316 "let three = 3;\n",
5317 );
5318
5319 let fs = FakeFs::new(cx.background());
5320 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5321
5322 let project = Project::test(fs, cx);
5323 let worktree_id = project
5324 .update(cx, |project, cx| {
5325 project.find_or_create_local_worktree("/dir", true, cx)
5326 })
5327 .await
5328 .unwrap()
5329 .0
5330 .read_with(cx, |tree, _| tree.id());
5331
5332 let buffer = project
5333 .update(cx, |project, cx| {
5334 project.open_buffer((worktree_id, "a.rs"), cx)
5335 })
5336 .await
5337 .unwrap();
5338
5339 project.update(cx, |project, cx| {
5340 project
5341 .update_buffer_diagnostics(
5342 &buffer,
5343 vec![
5344 DiagnosticEntry {
5345 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5346 diagnostic: Diagnostic {
5347 severity: DiagnosticSeverity::ERROR,
5348 message: "syntax error 1".to_string(),
5349 ..Default::default()
5350 },
5351 },
5352 DiagnosticEntry {
5353 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5354 diagnostic: Diagnostic {
5355 severity: DiagnosticSeverity::ERROR,
5356 message: "syntax error 2".to_string(),
5357 ..Default::default()
5358 },
5359 },
5360 ],
5361 None,
5362 cx,
5363 )
5364 .unwrap();
5365 });
5366
5367 // An empty range is extended forward to include the following character.
5368 // At the end of a line, an empty range is extended backward to include
5369 // the preceding character.
5370 buffer.read_with(cx, |buffer, _| {
5371 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5372 assert_eq!(
5373 chunks
5374 .iter()
5375 .map(|(s, d)| (s.as_str(), *d))
5376 .collect::<Vec<_>>(),
5377 &[
5378 ("let one = ", None),
5379 (";", Some(DiagnosticSeverity::ERROR)),
5380 ("\nlet two =", None),
5381 (" ", Some(DiagnosticSeverity::ERROR)),
5382 ("\nlet three = 3;\n", None)
5383 ]
5384 );
5385 });
5386 }
5387
5388 #[gpui::test]
5389 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5390 cx.foreground().forbid_parking();
5391
5392 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5393 let language = Arc::new(Language::new(
5394 LanguageConfig {
5395 name: "Rust".into(),
5396 path_suffixes: vec!["rs".to_string()],
5397 language_server: Some(lsp_config),
5398 ..Default::default()
5399 },
5400 Some(tree_sitter_rust::language()),
5401 ));
5402
5403 let text = "
5404 fn a() {
5405 f1();
5406 }
5407 fn b() {
5408 f2();
5409 }
5410 fn c() {
5411 f3();
5412 }
5413 "
5414 .unindent();
5415
5416 let fs = FakeFs::new(cx.background());
5417 fs.insert_tree(
5418 "/dir",
5419 json!({
5420 "a.rs": text.clone(),
5421 }),
5422 )
5423 .await;
5424
5425 let project = Project::test(fs, cx);
5426 project.update(cx, |project, _| project.languages.add(language));
5427
5428 let worktree_id = project
5429 .update(cx, |project, cx| {
5430 project.find_or_create_local_worktree("/dir", true, cx)
5431 })
5432 .await
5433 .unwrap()
5434 .0
5435 .read_with(cx, |tree, _| tree.id());
5436
5437 let buffer = project
5438 .update(cx, |project, cx| {
5439 project.open_buffer((worktree_id, "a.rs"), cx)
5440 })
5441 .await
5442 .unwrap();
5443
5444 let mut fake_server = fake_servers.next().await.unwrap();
5445 let lsp_document_version = fake_server
5446 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5447 .await
5448 .text_document
5449 .version;
5450
5451 // Simulate editing the buffer after the language server computes some edits.
5452 buffer.update(cx, |buffer, cx| {
5453 buffer.edit(
5454 [Point::new(0, 0)..Point::new(0, 0)],
5455 "// above first function\n",
5456 cx,
5457 );
5458 buffer.edit(
5459 [Point::new(2, 0)..Point::new(2, 0)],
5460 " // inside first function\n",
5461 cx,
5462 );
5463 buffer.edit(
5464 [Point::new(6, 4)..Point::new(6, 4)],
5465 "// inside second function ",
5466 cx,
5467 );
5468
5469 assert_eq!(
5470 buffer.text(),
5471 "
5472 // above first function
5473 fn a() {
5474 // inside first function
5475 f1();
5476 }
5477 fn b() {
5478 // inside second function f2();
5479 }
5480 fn c() {
5481 f3();
5482 }
5483 "
5484 .unindent()
5485 );
5486 });
5487
5488 let edits = project
5489 .update(cx, |project, cx| {
5490 project.edits_from_lsp(
5491 &buffer,
5492 vec![
5493 // replace body of first function
5494 lsp::TextEdit {
5495 range: lsp::Range::new(
5496 lsp::Position::new(0, 0),
5497 lsp::Position::new(3, 0),
5498 ),
5499 new_text: "
5500 fn a() {
5501 f10();
5502 }
5503 "
5504 .unindent(),
5505 },
5506 // edit inside second function
5507 lsp::TextEdit {
5508 range: lsp::Range::new(
5509 lsp::Position::new(4, 6),
5510 lsp::Position::new(4, 6),
5511 ),
5512 new_text: "00".into(),
5513 },
5514 // edit inside third function via two distinct edits
5515 lsp::TextEdit {
5516 range: lsp::Range::new(
5517 lsp::Position::new(7, 5),
5518 lsp::Position::new(7, 5),
5519 ),
5520 new_text: "4000".into(),
5521 },
5522 lsp::TextEdit {
5523 range: lsp::Range::new(
5524 lsp::Position::new(7, 5),
5525 lsp::Position::new(7, 6),
5526 ),
5527 new_text: "".into(),
5528 },
5529 ],
5530 Some(lsp_document_version),
5531 cx,
5532 )
5533 })
5534 .await
5535 .unwrap();
5536
5537 buffer.update(cx, |buffer, cx| {
5538 for (range, new_text) in edits {
5539 buffer.edit([range], new_text, cx);
5540 }
5541 assert_eq!(
5542 buffer.text(),
5543 "
5544 // above first function
5545 fn a() {
5546 // inside first function
5547 f10();
5548 }
5549 fn b() {
5550 // inside second function f200();
5551 }
5552 fn c() {
5553 f4000();
5554 }
5555 "
5556 .unindent()
5557 );
5558 });
5559 }
5560
5561 #[gpui::test]
5562 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5563 cx.foreground().forbid_parking();
5564
5565 let text = "
5566 use a::b;
5567 use a::c;
5568
5569 fn f() {
5570 b();
5571 c();
5572 }
5573 "
5574 .unindent();
5575
5576 let fs = FakeFs::new(cx.background());
5577 fs.insert_tree(
5578 "/dir",
5579 json!({
5580 "a.rs": text.clone(),
5581 }),
5582 )
5583 .await;
5584
5585 let project = Project::test(fs, cx);
5586 let worktree_id = project
5587 .update(cx, |project, cx| {
5588 project.find_or_create_local_worktree("/dir", true, cx)
5589 })
5590 .await
5591 .unwrap()
5592 .0
5593 .read_with(cx, |tree, _| tree.id());
5594
5595 let buffer = project
5596 .update(cx, |project, cx| {
5597 project.open_buffer((worktree_id, "a.rs"), cx)
5598 })
5599 .await
5600 .unwrap();
5601
5602 // Simulate the language server sending us a small edit in the form of a very large diff.
5603 // Rust-analyzer does this when performing a merge-imports code action.
5604 let edits = project
5605 .update(cx, |project, cx| {
5606 project.edits_from_lsp(
5607 &buffer,
5608 [
5609 // Replace the first use statement without editing the semicolon.
5610 lsp::TextEdit {
5611 range: lsp::Range::new(
5612 lsp::Position::new(0, 4),
5613 lsp::Position::new(0, 8),
5614 ),
5615 new_text: "a::{b, c}".into(),
5616 },
5617 // Reinsert the remainder of the file between the semicolon and the final
5618 // newline of the file.
5619 lsp::TextEdit {
5620 range: lsp::Range::new(
5621 lsp::Position::new(0, 9),
5622 lsp::Position::new(0, 9),
5623 ),
5624 new_text: "\n\n".into(),
5625 },
5626 lsp::TextEdit {
5627 range: lsp::Range::new(
5628 lsp::Position::new(0, 9),
5629 lsp::Position::new(0, 9),
5630 ),
5631 new_text: "
5632 fn f() {
5633 b();
5634 c();
5635 }"
5636 .unindent(),
5637 },
5638 // Delete everything after the first newline of the file.
5639 lsp::TextEdit {
5640 range: lsp::Range::new(
5641 lsp::Position::new(1, 0),
5642 lsp::Position::new(7, 0),
5643 ),
5644 new_text: "".into(),
5645 },
5646 ],
5647 None,
5648 cx,
5649 )
5650 })
5651 .await
5652 .unwrap();
5653
5654 buffer.update(cx, |buffer, cx| {
5655 let edits = edits
5656 .into_iter()
5657 .map(|(range, text)| {
5658 (
5659 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5660 text,
5661 )
5662 })
5663 .collect::<Vec<_>>();
5664
5665 assert_eq!(
5666 edits,
5667 [
5668 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5669 (Point::new(1, 0)..Point::new(2, 0), "".into())
5670 ]
5671 );
5672
5673 for (range, new_text) in edits {
5674 buffer.edit([range], new_text, cx);
5675 }
5676 assert_eq!(
5677 buffer.text(),
5678 "
5679 use a::{b, c};
5680
5681 fn f() {
5682 b();
5683 c();
5684 }
5685 "
5686 .unindent()
5687 );
5688 });
5689 }
5690
5691 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5692 buffer: &Buffer,
5693 range: Range<T>,
5694 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5695 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5696 for chunk in buffer.snapshot().chunks(range, true) {
5697 if chunks.last().map_or(false, |prev_chunk| {
5698 prev_chunk.1 == chunk.diagnostic_severity
5699 }) {
5700 chunks.last_mut().unwrap().0.push_str(chunk.text);
5701 } else {
5702 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5703 }
5704 }
5705 chunks
5706 }
5707
5708 #[gpui::test]
5709 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5710 let dir = temp_tree(json!({
5711 "root": {
5712 "dir1": {},
5713 "dir2": {
5714 "dir3": {}
5715 }
5716 }
5717 }));
5718
5719 let project = Project::test(Arc::new(RealFs), cx);
5720 let (tree, _) = project
5721 .update(cx, |project, cx| {
5722 project.find_or_create_local_worktree(&dir.path(), true, cx)
5723 })
5724 .await
5725 .unwrap();
5726
5727 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5728 .await;
5729
5730 let cancel_flag = Default::default();
5731 let results = project
5732 .read_with(cx, |project, cx| {
5733 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5734 })
5735 .await;
5736
5737 assert!(results.is_empty());
5738 }
5739
5740 #[gpui::test]
5741 async fn test_definition(cx: &mut gpui::TestAppContext) {
5742 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5743 let language = Arc::new(Language::new(
5744 LanguageConfig {
5745 name: "Rust".into(),
5746 path_suffixes: vec!["rs".to_string()],
5747 language_server: Some(language_server_config),
5748 ..Default::default()
5749 },
5750 Some(tree_sitter_rust::language()),
5751 ));
5752
5753 let fs = FakeFs::new(cx.background());
5754 fs.insert_tree(
5755 "/dir",
5756 json!({
5757 "a.rs": "const fn a() { A }",
5758 "b.rs": "const y: i32 = crate::a()",
5759 }),
5760 )
5761 .await;
5762
5763 let project = Project::test(fs, cx);
5764 project.update(cx, |project, _| {
5765 Arc::get_mut(&mut project.languages).unwrap().add(language);
5766 });
5767
5768 let (tree, _) = project
5769 .update(cx, |project, cx| {
5770 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5771 })
5772 .await
5773 .unwrap();
5774 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5775 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5776 .await;
5777
5778 let buffer = project
5779 .update(cx, |project, cx| {
5780 project.open_buffer(
5781 ProjectPath {
5782 worktree_id,
5783 path: Path::new("").into(),
5784 },
5785 cx,
5786 )
5787 })
5788 .await
5789 .unwrap();
5790
5791 let mut fake_server = fake_servers.next().await.unwrap();
5792 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5793 let params = params.text_document_position_params;
5794 assert_eq!(
5795 params.text_document.uri.to_file_path().unwrap(),
5796 Path::new("/dir/b.rs"),
5797 );
5798 assert_eq!(params.position, lsp::Position::new(0, 22));
5799
5800 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5801 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5802 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5803 )))
5804 });
5805
5806 let mut definitions = project
5807 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5808 .await
5809 .unwrap();
5810
5811 assert_eq!(definitions.len(), 1);
5812 let definition = definitions.pop().unwrap();
5813 cx.update(|cx| {
5814 let target_buffer = definition.buffer.read(cx);
5815 assert_eq!(
5816 target_buffer
5817 .file()
5818 .unwrap()
5819 .as_local()
5820 .unwrap()
5821 .abs_path(cx),
5822 Path::new("/dir/a.rs"),
5823 );
5824 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5825 assert_eq!(
5826 list_worktrees(&project, cx),
5827 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5828 );
5829
5830 drop(definition);
5831 });
5832 cx.read(|cx| {
5833 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5834 });
5835
5836 fn list_worktrees<'a>(
5837 project: &'a ModelHandle<Project>,
5838 cx: &'a AppContext,
5839 ) -> Vec<(&'a Path, bool)> {
5840 project
5841 .read(cx)
5842 .worktrees(cx)
5843 .map(|worktree| {
5844 let worktree = worktree.read(cx);
5845 (
5846 worktree.as_local().unwrap().abs_path().as_ref(),
5847 worktree.is_visible(),
5848 )
5849 })
5850 .collect::<Vec<_>>()
5851 }
5852 }
5853
5854 #[gpui::test]
5855 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5856 let fs = FakeFs::new(cx.background());
5857 fs.insert_tree(
5858 "/dir",
5859 json!({
5860 "file1": "the old contents",
5861 }),
5862 )
5863 .await;
5864
5865 let project = Project::test(fs.clone(), cx);
5866 let worktree_id = project
5867 .update(cx, |p, cx| {
5868 p.find_or_create_local_worktree("/dir", true, cx)
5869 })
5870 .await
5871 .unwrap()
5872 .0
5873 .read_with(cx, |tree, _| tree.id());
5874
5875 let buffer = project
5876 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5877 .await
5878 .unwrap();
5879 buffer
5880 .update(cx, |buffer, cx| {
5881 assert_eq!(buffer.text(), "the old contents");
5882 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5883 buffer.save(cx)
5884 })
5885 .await
5886 .unwrap();
5887
5888 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5889 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5890 }
5891
5892 #[gpui::test]
5893 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5894 let fs = FakeFs::new(cx.background());
5895 fs.insert_tree(
5896 "/dir",
5897 json!({
5898 "file1": "the old contents",
5899 }),
5900 )
5901 .await;
5902
5903 let project = Project::test(fs.clone(), cx);
5904 let worktree_id = project
5905 .update(cx, |p, cx| {
5906 p.find_or_create_local_worktree("/dir/file1", true, cx)
5907 })
5908 .await
5909 .unwrap()
5910 .0
5911 .read_with(cx, |tree, _| tree.id());
5912
5913 let buffer = project
5914 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5915 .await
5916 .unwrap();
5917 buffer
5918 .update(cx, |buffer, cx| {
5919 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5920 buffer.save(cx)
5921 })
5922 .await
5923 .unwrap();
5924
5925 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5926 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5927 }
5928
5929 #[gpui::test]
5930 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5931 let fs = FakeFs::new(cx.background());
5932 fs.insert_tree("/dir", json!({})).await;
5933
5934 let project = Project::test(fs.clone(), cx);
5935 let (worktree, _) = project
5936 .update(cx, |project, cx| {
5937 project.find_or_create_local_worktree("/dir", true, cx)
5938 })
5939 .await
5940 .unwrap();
5941 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5942
5943 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5944 buffer.update(cx, |buffer, cx| {
5945 buffer.edit([0..0], "abc", cx);
5946 assert!(buffer.is_dirty());
5947 assert!(!buffer.has_conflict());
5948 });
5949 project
5950 .update(cx, |project, cx| {
5951 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5952 })
5953 .await
5954 .unwrap();
5955 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5956 buffer.read_with(cx, |buffer, cx| {
5957 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5958 assert!(!buffer.is_dirty());
5959 assert!(!buffer.has_conflict());
5960 });
5961
5962 let opened_buffer = project
5963 .update(cx, |project, cx| {
5964 project.open_buffer((worktree_id, "file1"), cx)
5965 })
5966 .await
5967 .unwrap();
5968 assert_eq!(opened_buffer, buffer);
5969 }
5970
5971 #[gpui::test(retries = 5)]
5972 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5973 let dir = temp_tree(json!({
5974 "a": {
5975 "file1": "",
5976 "file2": "",
5977 "file3": "",
5978 },
5979 "b": {
5980 "c": {
5981 "file4": "",
5982 "file5": "",
5983 }
5984 }
5985 }));
5986
5987 let project = Project::test(Arc::new(RealFs), cx);
5988 let rpc = project.read_with(cx, |p, _| p.client.clone());
5989
5990 let (tree, _) = project
5991 .update(cx, |p, cx| {
5992 p.find_or_create_local_worktree(dir.path(), true, cx)
5993 })
5994 .await
5995 .unwrap();
5996 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5997
5998 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5999 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6000 async move { buffer.await.unwrap() }
6001 };
6002 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6003 tree.read_with(cx, |tree, _| {
6004 tree.entry_for_path(path)
6005 .expect(&format!("no entry for path {}", path))
6006 .id
6007 })
6008 };
6009
6010 let buffer2 = buffer_for_path("a/file2", cx).await;
6011 let buffer3 = buffer_for_path("a/file3", cx).await;
6012 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6013 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6014
6015 let file2_id = id_for_path("a/file2", &cx);
6016 let file3_id = id_for_path("a/file3", &cx);
6017 let file4_id = id_for_path("b/c/file4", &cx);
6018
6019 // Wait for the initial scan.
6020 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6021 .await;
6022
6023 // Create a remote copy of this worktree.
6024 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6025 let (remote, load_task) = cx.update(|cx| {
6026 Worktree::remote(
6027 1,
6028 1,
6029 initial_snapshot.to_proto(&Default::default(), true),
6030 rpc.clone(),
6031 cx,
6032 )
6033 });
6034 load_task.await;
6035
6036 cx.read(|cx| {
6037 assert!(!buffer2.read(cx).is_dirty());
6038 assert!(!buffer3.read(cx).is_dirty());
6039 assert!(!buffer4.read(cx).is_dirty());
6040 assert!(!buffer5.read(cx).is_dirty());
6041 });
6042
6043 // Rename and delete files and directories.
6044 tree.flush_fs_events(&cx).await;
6045 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6046 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6047 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6048 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6049 tree.flush_fs_events(&cx).await;
6050
6051 let expected_paths = vec![
6052 "a",
6053 "a/file1",
6054 "a/file2.new",
6055 "b",
6056 "d",
6057 "d/file3",
6058 "d/file4",
6059 ];
6060
6061 cx.read(|app| {
6062 assert_eq!(
6063 tree.read(app)
6064 .paths()
6065 .map(|p| p.to_str().unwrap())
6066 .collect::<Vec<_>>(),
6067 expected_paths
6068 );
6069
6070 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6071 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6072 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6073
6074 assert_eq!(
6075 buffer2.read(app).file().unwrap().path().as_ref(),
6076 Path::new("a/file2.new")
6077 );
6078 assert_eq!(
6079 buffer3.read(app).file().unwrap().path().as_ref(),
6080 Path::new("d/file3")
6081 );
6082 assert_eq!(
6083 buffer4.read(app).file().unwrap().path().as_ref(),
6084 Path::new("d/file4")
6085 );
6086 assert_eq!(
6087 buffer5.read(app).file().unwrap().path().as_ref(),
6088 Path::new("b/c/file5")
6089 );
6090
6091 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6092 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6093 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6094 assert!(buffer5.read(app).file().unwrap().is_deleted());
6095 });
6096
6097 // Update the remote worktree. Check that it becomes consistent with the
6098 // local worktree.
6099 remote.update(cx, |remote, cx| {
6100 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6101 &initial_snapshot,
6102 1,
6103 1,
6104 true,
6105 );
6106 remote
6107 .as_remote_mut()
6108 .unwrap()
6109 .snapshot
6110 .apply_remote_update(update_message)
6111 .unwrap();
6112
6113 assert_eq!(
6114 remote
6115 .paths()
6116 .map(|p| p.to_str().unwrap())
6117 .collect::<Vec<_>>(),
6118 expected_paths
6119 );
6120 });
6121 }
6122
6123 #[gpui::test]
6124 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6125 let fs = FakeFs::new(cx.background());
6126 fs.insert_tree(
6127 "/the-dir",
6128 json!({
6129 "a.txt": "a-contents",
6130 "b.txt": "b-contents",
6131 }),
6132 )
6133 .await;
6134
6135 let project = Project::test(fs.clone(), cx);
6136 let worktree_id = project
6137 .update(cx, |p, cx| {
6138 p.find_or_create_local_worktree("/the-dir", true, cx)
6139 })
6140 .await
6141 .unwrap()
6142 .0
6143 .read_with(cx, |tree, _| tree.id());
6144
6145 // Spawn multiple tasks to open paths, repeating some paths.
6146 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6147 (
6148 p.open_buffer((worktree_id, "a.txt"), cx),
6149 p.open_buffer((worktree_id, "b.txt"), cx),
6150 p.open_buffer((worktree_id, "a.txt"), cx),
6151 )
6152 });
6153
6154 let buffer_a_1 = buffer_a_1.await.unwrap();
6155 let buffer_a_2 = buffer_a_2.await.unwrap();
6156 let buffer_b = buffer_b.await.unwrap();
6157 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6158 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6159
6160 // There is only one buffer per path.
6161 let buffer_a_id = buffer_a_1.id();
6162 assert_eq!(buffer_a_2.id(), buffer_a_id);
6163
6164 // Open the same path again while it is still open.
6165 drop(buffer_a_1);
6166 let buffer_a_3 = project
6167 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6168 .await
6169 .unwrap();
6170
6171 // There's still only one buffer per path.
6172 assert_eq!(buffer_a_3.id(), buffer_a_id);
6173 }
6174
6175 #[gpui::test]
6176 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6177 use std::fs;
6178
6179 let dir = temp_tree(json!({
6180 "file1": "abc",
6181 "file2": "def",
6182 "file3": "ghi",
6183 }));
6184
6185 let project = Project::test(Arc::new(RealFs), cx);
6186 let (worktree, _) = project
6187 .update(cx, |p, cx| {
6188 p.find_or_create_local_worktree(dir.path(), true, cx)
6189 })
6190 .await
6191 .unwrap();
6192 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6193
6194 worktree.flush_fs_events(&cx).await;
6195 worktree
6196 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6197 .await;
6198
6199 let buffer1 = project
6200 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6201 .await
6202 .unwrap();
6203 let events = Rc::new(RefCell::new(Vec::new()));
6204
6205 // initially, the buffer isn't dirty.
6206 buffer1.update(cx, |buffer, cx| {
6207 cx.subscribe(&buffer1, {
6208 let events = events.clone();
6209 move |_, _, event, _| match event {
6210 BufferEvent::Operation(_) => {}
6211 _ => events.borrow_mut().push(event.clone()),
6212 }
6213 })
6214 .detach();
6215
6216 assert!(!buffer.is_dirty());
6217 assert!(events.borrow().is_empty());
6218
6219 buffer.edit(vec![1..2], "", cx);
6220 });
6221
6222 // after the first edit, the buffer is dirty, and emits a dirtied event.
6223 buffer1.update(cx, |buffer, cx| {
6224 assert!(buffer.text() == "ac");
6225 assert!(buffer.is_dirty());
6226 assert_eq!(
6227 *events.borrow(),
6228 &[language::Event::Edited, language::Event::Dirtied]
6229 );
6230 events.borrow_mut().clear();
6231 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6232 });
6233
6234 // after saving, the buffer is not dirty, and emits a saved event.
6235 buffer1.update(cx, |buffer, cx| {
6236 assert!(!buffer.is_dirty());
6237 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6238 events.borrow_mut().clear();
6239
6240 buffer.edit(vec![1..1], "B", cx);
6241 buffer.edit(vec![2..2], "D", cx);
6242 });
6243
6244 // after editing again, the buffer is dirty, and emits another dirty event.
6245 buffer1.update(cx, |buffer, cx| {
6246 assert!(buffer.text() == "aBDc");
6247 assert!(buffer.is_dirty());
6248 assert_eq!(
6249 *events.borrow(),
6250 &[
6251 language::Event::Edited,
6252 language::Event::Dirtied,
6253 language::Event::Edited,
6254 ],
6255 );
6256 events.borrow_mut().clear();
6257
6258 // TODO - currently, after restoring the buffer to its
6259 // previously-saved state, the is still considered dirty.
6260 buffer.edit([1..3], "", cx);
6261 assert!(buffer.text() == "ac");
6262 assert!(buffer.is_dirty());
6263 });
6264
6265 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6266
6267 // When a file is deleted, the buffer is considered dirty.
6268 let events = Rc::new(RefCell::new(Vec::new()));
6269 let buffer2 = project
6270 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6271 .await
6272 .unwrap();
6273 buffer2.update(cx, |_, cx| {
6274 cx.subscribe(&buffer2, {
6275 let events = events.clone();
6276 move |_, _, event, _| events.borrow_mut().push(event.clone())
6277 })
6278 .detach();
6279 });
6280
6281 fs::remove_file(dir.path().join("file2")).unwrap();
6282 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6283 assert_eq!(
6284 *events.borrow(),
6285 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6286 );
6287
6288 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6289 let events = Rc::new(RefCell::new(Vec::new()));
6290 let buffer3 = project
6291 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6292 .await
6293 .unwrap();
6294 buffer3.update(cx, |_, cx| {
6295 cx.subscribe(&buffer3, {
6296 let events = events.clone();
6297 move |_, _, event, _| events.borrow_mut().push(event.clone())
6298 })
6299 .detach();
6300 });
6301
6302 worktree.flush_fs_events(&cx).await;
6303 buffer3.update(cx, |buffer, cx| {
6304 buffer.edit(Some(0..0), "x", cx);
6305 });
6306 events.borrow_mut().clear();
6307 fs::remove_file(dir.path().join("file3")).unwrap();
6308 buffer3
6309 .condition(&cx, |_, _| !events.borrow().is_empty())
6310 .await;
6311 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6312 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6313 }
6314
6315 #[gpui::test]
6316 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6317 use std::fs;
6318
6319 let initial_contents = "aaa\nbbbbb\nc\n";
6320 let dir = temp_tree(json!({ "the-file": initial_contents }));
6321
6322 let project = Project::test(Arc::new(RealFs), cx);
6323 let (worktree, _) = project
6324 .update(cx, |p, cx| {
6325 p.find_or_create_local_worktree(dir.path(), true, cx)
6326 })
6327 .await
6328 .unwrap();
6329 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6330
6331 worktree
6332 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6333 .await;
6334
6335 let abs_path = dir.path().join("the-file");
6336 let buffer = project
6337 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6338 .await
6339 .unwrap();
6340
6341 // TODO
6342 // Add a cursor on each row.
6343 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6344 // assert!(!buffer.is_dirty());
6345 // buffer.add_selection_set(
6346 // &(0..3)
6347 // .map(|row| Selection {
6348 // id: row as usize,
6349 // start: Point::new(row, 1),
6350 // end: Point::new(row, 1),
6351 // reversed: false,
6352 // goal: SelectionGoal::None,
6353 // })
6354 // .collect::<Vec<_>>(),
6355 // cx,
6356 // )
6357 // });
6358
6359 // Change the file on disk, adding two new lines of text, and removing
6360 // one line.
6361 buffer.read_with(cx, |buffer, _| {
6362 assert!(!buffer.is_dirty());
6363 assert!(!buffer.has_conflict());
6364 });
6365 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6366 fs::write(&abs_path, new_contents).unwrap();
6367
6368 // Because the buffer was not modified, it is reloaded from disk. Its
6369 // contents are edited according to the diff between the old and new
6370 // file contents.
6371 buffer
6372 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6373 .await;
6374
6375 buffer.update(cx, |buffer, _| {
6376 assert_eq!(buffer.text(), new_contents);
6377 assert!(!buffer.is_dirty());
6378 assert!(!buffer.has_conflict());
6379
6380 // TODO
6381 // let cursor_positions = buffer
6382 // .selection_set(selection_set_id)
6383 // .unwrap()
6384 // .selections::<Point>(&*buffer)
6385 // .map(|selection| {
6386 // assert_eq!(selection.start, selection.end);
6387 // selection.start
6388 // })
6389 // .collect::<Vec<_>>();
6390 // assert_eq!(
6391 // cursor_positions,
6392 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6393 // );
6394 });
6395
6396 // Modify the buffer
6397 buffer.update(cx, |buffer, cx| {
6398 buffer.edit(vec![0..0], " ", cx);
6399 assert!(buffer.is_dirty());
6400 assert!(!buffer.has_conflict());
6401 });
6402
6403 // Change the file on disk again, adding blank lines to the beginning.
6404 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6405
6406 // Because the buffer is modified, it doesn't reload from disk, but is
6407 // marked as having a conflict.
6408 buffer
6409 .condition(&cx, |buffer, _| buffer.has_conflict())
6410 .await;
6411 }
6412
6413 #[gpui::test]
6414 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6415 cx.foreground().forbid_parking();
6416
6417 let fs = FakeFs::new(cx.background());
6418 fs.insert_tree(
6419 "/the-dir",
6420 json!({
6421 "a.rs": "
6422 fn foo(mut v: Vec<usize>) {
6423 for x in &v {
6424 v.push(1);
6425 }
6426 }
6427 "
6428 .unindent(),
6429 }),
6430 )
6431 .await;
6432
6433 let project = Project::test(fs.clone(), cx);
6434 let (worktree, _) = project
6435 .update(cx, |p, cx| {
6436 p.find_or_create_local_worktree("/the-dir", true, cx)
6437 })
6438 .await
6439 .unwrap();
6440 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6441
6442 let buffer = project
6443 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6444 .await
6445 .unwrap();
6446
6447 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6448 let message = lsp::PublishDiagnosticsParams {
6449 uri: buffer_uri.clone(),
6450 diagnostics: vec![
6451 lsp::Diagnostic {
6452 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6453 severity: Some(DiagnosticSeverity::WARNING),
6454 message: "error 1".to_string(),
6455 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6456 location: lsp::Location {
6457 uri: buffer_uri.clone(),
6458 range: lsp::Range::new(
6459 lsp::Position::new(1, 8),
6460 lsp::Position::new(1, 9),
6461 ),
6462 },
6463 message: "error 1 hint 1".to_string(),
6464 }]),
6465 ..Default::default()
6466 },
6467 lsp::Diagnostic {
6468 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6469 severity: Some(DiagnosticSeverity::HINT),
6470 message: "error 1 hint 1".to_string(),
6471 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6472 location: lsp::Location {
6473 uri: buffer_uri.clone(),
6474 range: lsp::Range::new(
6475 lsp::Position::new(1, 8),
6476 lsp::Position::new(1, 9),
6477 ),
6478 },
6479 message: "original diagnostic".to_string(),
6480 }]),
6481 ..Default::default()
6482 },
6483 lsp::Diagnostic {
6484 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6485 severity: Some(DiagnosticSeverity::ERROR),
6486 message: "error 2".to_string(),
6487 related_information: Some(vec![
6488 lsp::DiagnosticRelatedInformation {
6489 location: lsp::Location {
6490 uri: buffer_uri.clone(),
6491 range: lsp::Range::new(
6492 lsp::Position::new(1, 13),
6493 lsp::Position::new(1, 15),
6494 ),
6495 },
6496 message: "error 2 hint 1".to_string(),
6497 },
6498 lsp::DiagnosticRelatedInformation {
6499 location: lsp::Location {
6500 uri: buffer_uri.clone(),
6501 range: lsp::Range::new(
6502 lsp::Position::new(1, 13),
6503 lsp::Position::new(1, 15),
6504 ),
6505 },
6506 message: "error 2 hint 2".to_string(),
6507 },
6508 ]),
6509 ..Default::default()
6510 },
6511 lsp::Diagnostic {
6512 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6513 severity: Some(DiagnosticSeverity::HINT),
6514 message: "error 2 hint 1".to_string(),
6515 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6516 location: lsp::Location {
6517 uri: buffer_uri.clone(),
6518 range: lsp::Range::new(
6519 lsp::Position::new(2, 8),
6520 lsp::Position::new(2, 17),
6521 ),
6522 },
6523 message: "original diagnostic".to_string(),
6524 }]),
6525 ..Default::default()
6526 },
6527 lsp::Diagnostic {
6528 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6529 severity: Some(DiagnosticSeverity::HINT),
6530 message: "error 2 hint 2".to_string(),
6531 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6532 location: lsp::Location {
6533 uri: buffer_uri.clone(),
6534 range: lsp::Range::new(
6535 lsp::Position::new(2, 8),
6536 lsp::Position::new(2, 17),
6537 ),
6538 },
6539 message: "original diagnostic".to_string(),
6540 }]),
6541 ..Default::default()
6542 },
6543 ],
6544 version: None,
6545 };
6546
6547 project
6548 .update(cx, |p, cx| {
6549 p.update_diagnostics(message, &Default::default(), cx)
6550 })
6551 .unwrap();
6552 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6553
6554 assert_eq!(
6555 buffer
6556 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6557 .collect::<Vec<_>>(),
6558 &[
6559 DiagnosticEntry {
6560 range: Point::new(1, 8)..Point::new(1, 9),
6561 diagnostic: Diagnostic {
6562 severity: DiagnosticSeverity::WARNING,
6563 message: "error 1".to_string(),
6564 group_id: 0,
6565 is_primary: true,
6566 ..Default::default()
6567 }
6568 },
6569 DiagnosticEntry {
6570 range: Point::new(1, 8)..Point::new(1, 9),
6571 diagnostic: Diagnostic {
6572 severity: DiagnosticSeverity::HINT,
6573 message: "error 1 hint 1".to_string(),
6574 group_id: 0,
6575 is_primary: false,
6576 ..Default::default()
6577 }
6578 },
6579 DiagnosticEntry {
6580 range: Point::new(1, 13)..Point::new(1, 15),
6581 diagnostic: Diagnostic {
6582 severity: DiagnosticSeverity::HINT,
6583 message: "error 2 hint 1".to_string(),
6584 group_id: 1,
6585 is_primary: false,
6586 ..Default::default()
6587 }
6588 },
6589 DiagnosticEntry {
6590 range: Point::new(1, 13)..Point::new(1, 15),
6591 diagnostic: Diagnostic {
6592 severity: DiagnosticSeverity::HINT,
6593 message: "error 2 hint 2".to_string(),
6594 group_id: 1,
6595 is_primary: false,
6596 ..Default::default()
6597 }
6598 },
6599 DiagnosticEntry {
6600 range: Point::new(2, 8)..Point::new(2, 17),
6601 diagnostic: Diagnostic {
6602 severity: DiagnosticSeverity::ERROR,
6603 message: "error 2".to_string(),
6604 group_id: 1,
6605 is_primary: true,
6606 ..Default::default()
6607 }
6608 }
6609 ]
6610 );
6611
6612 assert_eq!(
6613 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6614 &[
6615 DiagnosticEntry {
6616 range: Point::new(1, 8)..Point::new(1, 9),
6617 diagnostic: Diagnostic {
6618 severity: DiagnosticSeverity::WARNING,
6619 message: "error 1".to_string(),
6620 group_id: 0,
6621 is_primary: true,
6622 ..Default::default()
6623 }
6624 },
6625 DiagnosticEntry {
6626 range: Point::new(1, 8)..Point::new(1, 9),
6627 diagnostic: Diagnostic {
6628 severity: DiagnosticSeverity::HINT,
6629 message: "error 1 hint 1".to_string(),
6630 group_id: 0,
6631 is_primary: false,
6632 ..Default::default()
6633 }
6634 },
6635 ]
6636 );
6637 assert_eq!(
6638 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6639 &[
6640 DiagnosticEntry {
6641 range: Point::new(1, 13)..Point::new(1, 15),
6642 diagnostic: Diagnostic {
6643 severity: DiagnosticSeverity::HINT,
6644 message: "error 2 hint 1".to_string(),
6645 group_id: 1,
6646 is_primary: false,
6647 ..Default::default()
6648 }
6649 },
6650 DiagnosticEntry {
6651 range: Point::new(1, 13)..Point::new(1, 15),
6652 diagnostic: Diagnostic {
6653 severity: DiagnosticSeverity::HINT,
6654 message: "error 2 hint 2".to_string(),
6655 group_id: 1,
6656 is_primary: false,
6657 ..Default::default()
6658 }
6659 },
6660 DiagnosticEntry {
6661 range: Point::new(2, 8)..Point::new(2, 17),
6662 diagnostic: Diagnostic {
6663 severity: DiagnosticSeverity::ERROR,
6664 message: "error 2".to_string(),
6665 group_id: 1,
6666 is_primary: true,
6667 ..Default::default()
6668 }
6669 }
6670 ]
6671 );
6672 }
6673
6674 #[gpui::test]
6675 async fn test_rename(cx: &mut gpui::TestAppContext) {
6676 cx.foreground().forbid_parking();
6677
6678 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6679 let language = Arc::new(Language::new(
6680 LanguageConfig {
6681 name: "Rust".into(),
6682 path_suffixes: vec!["rs".to_string()],
6683 language_server: Some(language_server_config),
6684 ..Default::default()
6685 },
6686 Some(tree_sitter_rust::language()),
6687 ));
6688
6689 let fs = FakeFs::new(cx.background());
6690 fs.insert_tree(
6691 "/dir",
6692 json!({
6693 "one.rs": "const ONE: usize = 1;",
6694 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6695 }),
6696 )
6697 .await;
6698
6699 let project = Project::test(fs.clone(), cx);
6700 project.update(cx, |project, _| {
6701 Arc::get_mut(&mut project.languages).unwrap().add(language);
6702 });
6703
6704 let (tree, _) = project
6705 .update(cx, |project, cx| {
6706 project.find_or_create_local_worktree("/dir", true, cx)
6707 })
6708 .await
6709 .unwrap();
6710 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6711 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6712 .await;
6713
6714 let buffer = project
6715 .update(cx, |project, cx| {
6716 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6717 })
6718 .await
6719 .unwrap();
6720
6721 let mut fake_server = fake_servers.next().await.unwrap();
6722
6723 let response = project.update(cx, |project, cx| {
6724 project.prepare_rename(buffer.clone(), 7, cx)
6725 });
6726 fake_server
6727 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6728 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6729 assert_eq!(params.position, lsp::Position::new(0, 7));
6730 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6731 lsp::Position::new(0, 6),
6732 lsp::Position::new(0, 9),
6733 )))
6734 })
6735 .next()
6736 .await
6737 .unwrap();
6738 let range = response.await.unwrap().unwrap();
6739 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6740 assert_eq!(range, 6..9);
6741
6742 let response = project.update(cx, |project, cx| {
6743 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6744 });
6745 fake_server
6746 .handle_request::<lsp::request::Rename, _>(|params, _| {
6747 assert_eq!(
6748 params.text_document_position.text_document.uri.as_str(),
6749 "file:///dir/one.rs"
6750 );
6751 assert_eq!(
6752 params.text_document_position.position,
6753 lsp::Position::new(0, 7)
6754 );
6755 assert_eq!(params.new_name, "THREE");
6756 Some(lsp::WorkspaceEdit {
6757 changes: Some(
6758 [
6759 (
6760 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6761 vec![lsp::TextEdit::new(
6762 lsp::Range::new(
6763 lsp::Position::new(0, 6),
6764 lsp::Position::new(0, 9),
6765 ),
6766 "THREE".to_string(),
6767 )],
6768 ),
6769 (
6770 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6771 vec![
6772 lsp::TextEdit::new(
6773 lsp::Range::new(
6774 lsp::Position::new(0, 24),
6775 lsp::Position::new(0, 27),
6776 ),
6777 "THREE".to_string(),
6778 ),
6779 lsp::TextEdit::new(
6780 lsp::Range::new(
6781 lsp::Position::new(0, 35),
6782 lsp::Position::new(0, 38),
6783 ),
6784 "THREE".to_string(),
6785 ),
6786 ],
6787 ),
6788 ]
6789 .into_iter()
6790 .collect(),
6791 ),
6792 ..Default::default()
6793 })
6794 })
6795 .next()
6796 .await
6797 .unwrap();
6798 let mut transaction = response.await.unwrap().0;
6799 assert_eq!(transaction.len(), 2);
6800 assert_eq!(
6801 transaction
6802 .remove_entry(&buffer)
6803 .unwrap()
6804 .0
6805 .read_with(cx, |buffer, _| buffer.text()),
6806 "const THREE: usize = 1;"
6807 );
6808 assert_eq!(
6809 transaction
6810 .into_keys()
6811 .next()
6812 .unwrap()
6813 .read_with(cx, |buffer, _| buffer.text()),
6814 "const TWO: usize = one::THREE + one::THREE;"
6815 );
6816 }
6817
6818 #[gpui::test]
6819 async fn test_search(cx: &mut gpui::TestAppContext) {
6820 let fs = FakeFs::new(cx.background());
6821 fs.insert_tree(
6822 "/dir",
6823 json!({
6824 "one.rs": "const ONE: usize = 1;",
6825 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6826 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6827 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6828 }),
6829 )
6830 .await;
6831 let project = Project::test(fs.clone(), cx);
6832 let (tree, _) = project
6833 .update(cx, |project, cx| {
6834 project.find_or_create_local_worktree("/dir", true, cx)
6835 })
6836 .await
6837 .unwrap();
6838 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6839 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6840 .await;
6841
6842 assert_eq!(
6843 search(&project, SearchQuery::text("TWO", false, true), cx)
6844 .await
6845 .unwrap(),
6846 HashMap::from_iter([
6847 ("two.rs".to_string(), vec![6..9]),
6848 ("three.rs".to_string(), vec![37..40])
6849 ])
6850 );
6851
6852 let buffer_4 = project
6853 .update(cx, |project, cx| {
6854 project.open_buffer((worktree_id, "four.rs"), cx)
6855 })
6856 .await
6857 .unwrap();
6858 buffer_4.update(cx, |buffer, cx| {
6859 buffer.edit([20..28, 31..43], "two::TWO", cx);
6860 });
6861
6862 assert_eq!(
6863 search(&project, SearchQuery::text("TWO", false, true), cx)
6864 .await
6865 .unwrap(),
6866 HashMap::from_iter([
6867 ("two.rs".to_string(), vec![6..9]),
6868 ("three.rs".to_string(), vec![37..40]),
6869 ("four.rs".to_string(), vec![25..28, 36..39])
6870 ])
6871 );
6872
6873 async fn search(
6874 project: &ModelHandle<Project>,
6875 query: SearchQuery,
6876 cx: &mut gpui::TestAppContext,
6877 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6878 let results = project
6879 .update(cx, |project, cx| project.search(query, cx))
6880 .await?;
6881
6882 Ok(results
6883 .into_iter()
6884 .map(|(buffer, ranges)| {
6885 buffer.read_with(cx, |buffer, _| {
6886 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6887 let ranges = ranges
6888 .into_iter()
6889 .map(|range| range.to_offset(buffer))
6890 .collect::<Vec<_>>();
6891 (path, ranges)
6892 })
6893 })
6894 .collect())
6895 }
6896 }
6897}