1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128 CollaboratorLeft(PeerId),
129}
130
131enum LanguageServerEvent {
132 WorkStart {
133 token: String,
134 },
135 WorkProgress {
136 token: String,
137 progress: LanguageServerProgress,
138 },
139 WorkEnd {
140 token: String,
141 },
142 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
143}
144
145pub struct LanguageServerStatus {
146 pub name: String,
147 pub pending_work: BTreeMap<String, LanguageServerProgress>,
148 pending_diagnostic_updates: isize,
149}
150
151#[derive(Clone, Debug)]
152pub struct LanguageServerProgress {
153 pub message: Option<String>,
154 pub percentage: Option<usize>,
155 pub last_update_at: Instant,
156}
157
158#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
159pub struct ProjectPath {
160 pub worktree_id: WorktreeId,
161 pub path: Arc<Path>,
162}
163
164#[derive(Clone, Debug, Default, PartialEq)]
165pub struct DiagnosticSummary {
166 pub error_count: usize,
167 pub warning_count: usize,
168 pub info_count: usize,
169 pub hint_count: usize,
170}
171
172#[derive(Debug)]
173pub struct Location {
174 pub buffer: ModelHandle<Buffer>,
175 pub range: Range<language::Anchor>,
176}
177
178#[derive(Debug)]
179pub struct DocumentHighlight {
180 pub range: Range<language::Anchor>,
181 pub kind: DocumentHighlightKind,
182}
183
184#[derive(Clone, Debug)]
185pub struct Symbol {
186 pub source_worktree_id: WorktreeId,
187 pub worktree_id: WorktreeId,
188 pub language_name: String,
189 pub path: PathBuf,
190 pub label: CodeLabel,
191 pub name: String,
192 pub kind: lsp::SymbolKind,
193 pub range: Range<PointUtf16>,
194 pub signature: [u8; 32],
195}
196
197#[derive(Default)]
198pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
235#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
236pub struct ProjectEntryId(usize);
237
238impl ProjectEntryId {
239 pub fn new(counter: &AtomicUsize) -> Self {
240 Self(counter.fetch_add(1, SeqCst))
241 }
242
243 pub fn from_proto(id: u64) -> Self {
244 Self(id as usize)
245 }
246
247 pub fn to_proto(&self) -> u64 {
248 self.0 as u64
249 }
250
251 pub fn to_usize(&self) -> usize {
252 self.0
253 }
254}
255
256impl Project {
257 pub fn init(client: &Arc<Client>) {
258 client.add_model_message_handler(Self::handle_add_collaborator);
259 client.add_model_message_handler(Self::handle_buffer_reloaded);
260 client.add_model_message_handler(Self::handle_buffer_saved);
261 client.add_model_message_handler(Self::handle_start_language_server);
262 client.add_model_message_handler(Self::handle_update_language_server);
263 client.add_model_message_handler(Self::handle_remove_collaborator);
264 client.add_model_message_handler(Self::handle_register_worktree);
265 client.add_model_message_handler(Self::handle_unregister_worktree);
266 client.add_model_message_handler(Self::handle_unshare_project);
267 client.add_model_message_handler(Self::handle_update_buffer_file);
268 client.add_model_message_handler(Self::handle_update_buffer);
269 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
270 client.add_model_message_handler(Self::handle_update_worktree);
271 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
272 client.add_model_request_handler(Self::handle_apply_code_action);
273 client.add_model_request_handler(Self::handle_format_buffers);
274 client.add_model_request_handler(Self::handle_get_code_actions);
275 client.add_model_request_handler(Self::handle_get_completions);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
278 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
280 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
281 client.add_model_request_handler(Self::handle_search_project);
282 client.add_model_request_handler(Self::handle_get_project_symbols);
283 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
284 client.add_model_request_handler(Self::handle_open_buffer_by_id);
285 client.add_model_request_handler(Self::handle_open_buffer_by_path);
286 client.add_model_request_handler(Self::handle_save_buffer);
287 }
288
289 pub fn local(
290 client: Arc<Client>,
291 user_store: ModelHandle<UserStore>,
292 languages: Arc<LanguageRegistry>,
293 fs: Arc<dyn Fs>,
294 cx: &mut MutableAppContext,
295 ) -> ModelHandle<Self> {
296 cx.add_model(|cx: &mut ModelContext<Self>| {
297 let (remote_id_tx, remote_id_rx) = watch::channel();
298 let _maintain_remote_id_task = cx.spawn_weak({
299 let rpc = client.clone();
300 move |this, mut cx| {
301 async move {
302 let mut status = rpc.status();
303 while let Some(status) = status.next().await {
304 if let Some(this) = this.upgrade(&cx) {
305 let remote_id = if status.is_connected() {
306 let response = rpc.request(proto::RegisterProject {}).await?;
307 Some(response.project_id)
308 } else {
309 None
310 };
311
312 if let Some(project_id) = remote_id {
313 let mut registrations = Vec::new();
314 this.update(&mut cx, |this, cx| {
315 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
316 registrations.push(worktree.update(
317 cx,
318 |worktree, cx| {
319 let worktree = worktree.as_local_mut().unwrap();
320 worktree.register(project_id, cx)
321 },
322 ));
323 }
324 });
325 for registration in registrations {
326 registration.await?;
327 }
328 }
329 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
330 }
331 }
332 Ok(())
333 }
334 .log_err()
335 }
336 });
337
338 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
339 Self {
340 worktrees: Default::default(),
341 collaborators: Default::default(),
342 opened_buffers: Default::default(),
343 shared_buffers: Default::default(),
344 loading_buffers: Default::default(),
345 loading_local_worktrees: Default::default(),
346 buffer_snapshots: Default::default(),
347 client_state: ProjectClientState::Local {
348 is_shared: false,
349 remote_id_tx,
350 remote_id_rx,
351 _maintain_remote_id_task,
352 },
353 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
354 subscriptions: Vec::new(),
355 active_entry: None,
356 languages,
357 client,
358 user_store,
359 fs,
360 next_entry_id: Default::default(),
361 language_servers_with_diagnostics_running: 0,
362 language_servers: Default::default(),
363 started_language_servers: Default::default(),
364 language_server_statuses: Default::default(),
365 language_server_settings: Default::default(),
366 next_language_server_id: 0,
367 nonce: StdRng::from_entropy().gen(),
368 }
369 })
370 }
371
372 pub async fn remote(
373 remote_id: u64,
374 client: Arc<Client>,
375 user_store: ModelHandle<UserStore>,
376 languages: Arc<LanguageRegistry>,
377 fs: Arc<dyn Fs>,
378 cx: &mut AsyncAppContext,
379 ) -> Result<ModelHandle<Self>> {
380 client.authenticate_and_connect(true, &cx).await?;
381
382 let response = client
383 .request(proto::JoinProject {
384 project_id: remote_id,
385 })
386 .await?;
387
388 let replica_id = response.replica_id as ReplicaId;
389
390 let mut worktrees = Vec::new();
391 for worktree in response.worktrees {
392 let (worktree, load_task) = cx
393 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
394 worktrees.push(worktree);
395 load_task.detach();
396 }
397
398 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
399 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
400 let mut this = Self {
401 worktrees: Vec::new(),
402 loading_buffers: Default::default(),
403 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
404 shared_buffers: Default::default(),
405 loading_local_worktrees: Default::default(),
406 active_entry: None,
407 collaborators: Default::default(),
408 languages,
409 user_store: user_store.clone(),
410 fs,
411 next_entry_id: Default::default(),
412 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
413 client: client.clone(),
414 client_state: ProjectClientState::Remote {
415 sharing_has_stopped: false,
416 remote_id,
417 replica_id,
418 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
419 async move {
420 let mut status = client.status();
421 let is_connected =
422 status.next().await.map_or(false, |s| s.is_connected());
423 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
424 if !is_connected || status.next().await.is_some() {
425 if let Some(this) = this.upgrade(&cx) {
426 this.update(&mut cx, |this, cx| this.project_unshared(cx))
427 }
428 }
429 Ok(())
430 }
431 .log_err()
432 }),
433 },
434 language_servers_with_diagnostics_running: 0,
435 language_servers: Default::default(),
436 started_language_servers: Default::default(),
437 language_server_settings: Default::default(),
438 language_server_statuses: response
439 .language_servers
440 .into_iter()
441 .map(|server| {
442 (
443 server.id as usize,
444 LanguageServerStatus {
445 name: server.name,
446 pending_work: Default::default(),
447 pending_diagnostic_updates: 0,
448 },
449 )
450 })
451 .collect(),
452 next_language_server_id: 0,
453 opened_buffers: Default::default(),
454 buffer_snapshots: Default::default(),
455 nonce: StdRng::from_entropy().gen(),
456 };
457 for worktree in worktrees {
458 this.add_worktree(&worktree, cx);
459 }
460 this
461 });
462
463 let user_ids = response
464 .collaborators
465 .iter()
466 .map(|peer| peer.user_id)
467 .collect();
468 user_store
469 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
470 .await?;
471 let mut collaborators = HashMap::default();
472 for message in response.collaborators {
473 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
474 collaborators.insert(collaborator.peer_id, collaborator);
475 }
476
477 this.update(cx, |this, _| {
478 this.collaborators = collaborators;
479 });
480
481 Ok(this)
482 }
483
484 #[cfg(any(test, feature = "test-support"))]
485 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
486 let languages = Arc::new(LanguageRegistry::test());
487 let http_client = client::test::FakeHttpClient::with_404_response();
488 let client = client::Client::new(http_client.clone());
489 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
490 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
491 }
492
493 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
494 self.opened_buffers
495 .get(&remote_id)
496 .and_then(|buffer| buffer.upgrade(cx))
497 }
498
499 #[cfg(any(test, feature = "test-support"))]
500 pub fn languages(&self) -> &Arc<LanguageRegistry> {
501 &self.languages
502 }
503
504 #[cfg(any(test, feature = "test-support"))]
505 pub fn check_invariants(&self, cx: &AppContext) {
506 if self.is_local() {
507 let mut worktree_root_paths = HashMap::default();
508 for worktree in self.worktrees(cx) {
509 let worktree = worktree.read(cx);
510 let abs_path = worktree.as_local().unwrap().abs_path().clone();
511 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
512 assert_eq!(
513 prev_worktree_id,
514 None,
515 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
516 abs_path,
517 worktree.id(),
518 prev_worktree_id
519 )
520 }
521 } else {
522 let replica_id = self.replica_id();
523 for buffer in self.opened_buffers.values() {
524 if let Some(buffer) = buffer.upgrade(cx) {
525 let buffer = buffer.read(cx);
526 assert_eq!(
527 buffer.deferred_ops_len(),
528 0,
529 "replica {}, buffer {} has deferred operations",
530 replica_id,
531 buffer.remote_id()
532 );
533 }
534 }
535 }
536 }
537
538 #[cfg(any(test, feature = "test-support"))]
539 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
540 let path = path.into();
541 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
542 self.opened_buffers.iter().any(|(_, buffer)| {
543 if let Some(buffer) = buffer.upgrade(cx) {
544 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
545 if file.worktree == worktree && file.path() == &path.path {
546 return true;
547 }
548 }
549 }
550 false
551 })
552 } else {
553 false
554 }
555 }
556
557 pub fn fs(&self) -> &Arc<dyn Fs> {
558 &self.fs
559 }
560
561 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
562 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
563 *remote_id_tx.borrow_mut() = remote_id;
564 }
565
566 self.subscriptions.clear();
567 if let Some(remote_id) = remote_id {
568 self.subscriptions
569 .push(self.client.add_model_for_remote_entity(remote_id, cx));
570 }
571 cx.emit(Event::RemoteIdChanged(remote_id))
572 }
573
574 pub fn remote_id(&self) -> Option<u64> {
575 match &self.client_state {
576 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
577 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
578 }
579 }
580
581 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
582 let mut id = None;
583 let mut watch = None;
584 match &self.client_state {
585 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
586 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
587 }
588
589 async move {
590 if let Some(id) = id {
591 return id;
592 }
593 let mut watch = watch.unwrap();
594 loop {
595 let id = *watch.borrow();
596 if let Some(id) = id {
597 return id;
598 }
599 watch.next().await;
600 }
601 }
602 }
603
604 pub fn replica_id(&self) -> ReplicaId {
605 match &self.client_state {
606 ProjectClientState::Local { .. } => 0,
607 ProjectClientState::Remote { replica_id, .. } => *replica_id,
608 }
609 }
610
611 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
612 &self.collaborators
613 }
614
615 pub fn worktrees<'a>(
616 &'a self,
617 cx: &'a AppContext,
618 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
619 self.worktrees
620 .iter()
621 .filter_map(move |worktree| worktree.upgrade(cx))
622 }
623
624 pub fn visible_worktrees<'a>(
625 &'a self,
626 cx: &'a AppContext,
627 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
628 self.worktrees.iter().filter_map(|worktree| {
629 worktree.upgrade(cx).and_then(|worktree| {
630 if worktree.read(cx).is_visible() {
631 Some(worktree)
632 } else {
633 None
634 }
635 })
636 })
637 }
638
639 pub fn worktree_for_id(
640 &self,
641 id: WorktreeId,
642 cx: &AppContext,
643 ) -> Option<ModelHandle<Worktree>> {
644 self.worktrees(cx)
645 .find(|worktree| worktree.read(cx).id() == id)
646 }
647
648 pub fn worktree_for_entry(
649 &self,
650 entry_id: ProjectEntryId,
651 cx: &AppContext,
652 ) -> Option<ModelHandle<Worktree>> {
653 self.worktrees(cx)
654 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
655 }
656
657 pub fn worktree_id_for_entry(
658 &self,
659 entry_id: ProjectEntryId,
660 cx: &AppContext,
661 ) -> Option<WorktreeId> {
662 self.worktree_for_entry(entry_id, cx)
663 .map(|worktree| worktree.read(cx).id())
664 }
665
666 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
667 let rpc = self.client.clone();
668 cx.spawn(|this, mut cx| async move {
669 let project_id = this.update(&mut cx, |this, cx| {
670 if let ProjectClientState::Local {
671 is_shared,
672 remote_id_rx,
673 ..
674 } = &mut this.client_state
675 {
676 *is_shared = true;
677
678 for open_buffer in this.opened_buffers.values_mut() {
679 match open_buffer {
680 OpenBuffer::Strong(_) => {}
681 OpenBuffer::Weak(buffer) => {
682 if let Some(buffer) = buffer.upgrade(cx) {
683 *open_buffer = OpenBuffer::Strong(buffer);
684 }
685 }
686 OpenBuffer::Loading(_) => unreachable!(),
687 }
688 }
689
690 for worktree_handle in this.worktrees.iter_mut() {
691 match worktree_handle {
692 WorktreeHandle::Strong(_) => {}
693 WorktreeHandle::Weak(worktree) => {
694 if let Some(worktree) = worktree.upgrade(cx) {
695 *worktree_handle = WorktreeHandle::Strong(worktree);
696 }
697 }
698 }
699 }
700
701 remote_id_rx
702 .borrow()
703 .ok_or_else(|| anyhow!("no project id"))
704 } else {
705 Err(anyhow!("can't share a remote project"))
706 }
707 })?;
708
709 rpc.request(proto::ShareProject { project_id }).await?;
710
711 let mut tasks = Vec::new();
712 this.update(&mut cx, |this, cx| {
713 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
714 worktree.update(cx, |worktree, cx| {
715 let worktree = worktree.as_local_mut().unwrap();
716 tasks.push(worktree.share(project_id, cx));
717 });
718 }
719 });
720 for task in tasks {
721 task.await?;
722 }
723 this.update(&mut cx, |_, cx| cx.notify());
724 Ok(())
725 })
726 }
727
728 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
729 let rpc = self.client.clone();
730 cx.spawn(|this, mut cx| async move {
731 let project_id = this.update(&mut cx, |this, cx| {
732 if let ProjectClientState::Local {
733 is_shared,
734 remote_id_rx,
735 ..
736 } = &mut this.client_state
737 {
738 *is_shared = false;
739
740 for open_buffer in this.opened_buffers.values_mut() {
741 match open_buffer {
742 OpenBuffer::Strong(buffer) => {
743 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
744 }
745 _ => {}
746 }
747 }
748
749 for worktree_handle in this.worktrees.iter_mut() {
750 match worktree_handle {
751 WorktreeHandle::Strong(worktree) => {
752 if !worktree.read(cx).is_visible() {
753 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
754 }
755 }
756 _ => {}
757 }
758 }
759
760 remote_id_rx
761 .borrow()
762 .ok_or_else(|| anyhow!("no project id"))
763 } else {
764 Err(anyhow!("can't share a remote project"))
765 }
766 })?;
767
768 rpc.send(proto::UnshareProject { project_id })?;
769 this.update(&mut cx, |this, cx| {
770 this.collaborators.clear();
771 this.shared_buffers.clear();
772 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
773 worktree.update(cx, |worktree, _| {
774 worktree.as_local_mut().unwrap().unshare();
775 });
776 }
777 cx.notify()
778 });
779 Ok(())
780 })
781 }
782
783 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
784 if let ProjectClientState::Remote {
785 sharing_has_stopped,
786 ..
787 } = &mut self.client_state
788 {
789 *sharing_has_stopped = true;
790 self.collaborators.clear();
791 cx.notify();
792 }
793 }
794
795 pub fn is_read_only(&self) -> bool {
796 match &self.client_state {
797 ProjectClientState::Local { .. } => false,
798 ProjectClientState::Remote {
799 sharing_has_stopped,
800 ..
801 } => *sharing_has_stopped,
802 }
803 }
804
805 pub fn is_local(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => true,
808 ProjectClientState::Remote { .. } => false,
809 }
810 }
811
812 pub fn is_remote(&self) -> bool {
813 !self.is_local()
814 }
815
816 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
817 if self.is_remote() {
818 return Err(anyhow!("creating buffers as a guest is not supported yet"));
819 }
820
821 let buffer = cx.add_model(|cx| {
822 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
823 });
824 self.register_buffer(&buffer, cx)?;
825 Ok(buffer)
826 }
827
828 pub fn open_path(
829 &mut self,
830 path: impl Into<ProjectPath>,
831 cx: &mut ModelContext<Self>,
832 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
833 let task = self.open_buffer(path, cx);
834 cx.spawn_weak(|_, cx| async move {
835 let buffer = task.await?;
836 let project_entry_id = buffer
837 .read_with(&cx, |buffer, cx| {
838 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
839 })
840 .ok_or_else(|| anyhow!("no project entry"))?;
841 Ok((project_entry_id, buffer.into()))
842 })
843 }
844
845 pub fn open_buffer(
846 &mut self,
847 path: impl Into<ProjectPath>,
848 cx: &mut ModelContext<Self>,
849 ) -> Task<Result<ModelHandle<Buffer>>> {
850 let project_path = path.into();
851 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
852 worktree
853 } else {
854 return Task::ready(Err(anyhow!("no such worktree")));
855 };
856
857 // If there is already a buffer for the given path, then return it.
858 let existing_buffer = self.get_open_buffer(&project_path, cx);
859 if let Some(existing_buffer) = existing_buffer {
860 return Task::ready(Ok(existing_buffer));
861 }
862
863 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
864 // If the given path is already being loaded, then wait for that existing
865 // task to complete and return the same buffer.
866 hash_map::Entry::Occupied(e) => e.get().clone(),
867
868 // Otherwise, record the fact that this path is now being loaded.
869 hash_map::Entry::Vacant(entry) => {
870 let (mut tx, rx) = postage::watch::channel();
871 entry.insert(rx.clone());
872
873 let load_buffer = if worktree.read(cx).is_local() {
874 self.open_local_buffer(&project_path.path, &worktree, cx)
875 } else {
876 self.open_remote_buffer(&project_path.path, &worktree, cx)
877 };
878
879 cx.spawn(move |this, mut cx| async move {
880 let load_result = load_buffer.await;
881 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
882 // Record the fact that the buffer is no longer loading.
883 this.loading_buffers.remove(&project_path);
884 let buffer = load_result.map_err(Arc::new)?;
885 Ok(buffer)
886 }));
887 })
888 .detach();
889 rx
890 }
891 };
892
893 cx.foreground().spawn(async move {
894 loop {
895 if let Some(result) = loading_watch.borrow().as_ref() {
896 match result {
897 Ok(buffer) => return Ok(buffer.clone()),
898 Err(error) => return Err(anyhow!("{}", error)),
899 }
900 }
901 loading_watch.next().await;
902 }
903 })
904 }
905
906 fn open_local_buffer(
907 &mut self,
908 path: &Arc<Path>,
909 worktree: &ModelHandle<Worktree>,
910 cx: &mut ModelContext<Self>,
911 ) -> Task<Result<ModelHandle<Buffer>>> {
912 let load_buffer = worktree.update(cx, |worktree, cx| {
913 let worktree = worktree.as_local_mut().unwrap();
914 worktree.load_buffer(path, cx)
915 });
916 cx.spawn(|this, mut cx| async move {
917 let buffer = load_buffer.await?;
918 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
919 Ok(buffer)
920 })
921 }
922
923 fn open_remote_buffer(
924 &mut self,
925 path: &Arc<Path>,
926 worktree: &ModelHandle<Worktree>,
927 cx: &mut ModelContext<Self>,
928 ) -> Task<Result<ModelHandle<Buffer>>> {
929 let rpc = self.client.clone();
930 let project_id = self.remote_id().unwrap();
931 let remote_worktree_id = worktree.read(cx).id();
932 let path = path.clone();
933 let path_string = path.to_string_lossy().to_string();
934 cx.spawn(|this, mut cx| async move {
935 let response = rpc
936 .request(proto::OpenBufferByPath {
937 project_id,
938 worktree_id: remote_worktree_id.to_proto(),
939 path: path_string,
940 })
941 .await?;
942 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
943 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
944 .await
945 })
946 }
947
948 fn open_local_buffer_via_lsp(
949 &mut self,
950 abs_path: lsp::Url,
951 lang_name: Arc<str>,
952 lang_server: Arc<LanguageServer>,
953 cx: &mut ModelContext<Self>,
954 ) -> Task<Result<ModelHandle<Buffer>>> {
955 cx.spawn(|this, mut cx| async move {
956 let abs_path = abs_path
957 .to_file_path()
958 .map_err(|_| anyhow!("can't convert URI to path"))?;
959 let (worktree, relative_path) = if let Some(result) =
960 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
961 {
962 result
963 } else {
964 let worktree = this
965 .update(&mut cx, |this, cx| {
966 this.create_local_worktree(&abs_path, false, cx)
967 })
968 .await?;
969 this.update(&mut cx, |this, cx| {
970 this.language_servers
971 .insert((worktree.read(cx).id(), lang_name), lang_server);
972 });
973 (worktree, PathBuf::new())
974 };
975
976 let project_path = ProjectPath {
977 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
978 path: relative_path.into(),
979 };
980 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
981 .await
982 })
983 }
984
985 pub fn open_buffer_by_id(
986 &mut self,
987 id: u64,
988 cx: &mut ModelContext<Self>,
989 ) -> Task<Result<ModelHandle<Buffer>>> {
990 if let Some(buffer) = self.buffer_for_id(id, cx) {
991 Task::ready(Ok(buffer))
992 } else if self.is_local() {
993 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
994 } else if let Some(project_id) = self.remote_id() {
995 let request = self
996 .client
997 .request(proto::OpenBufferById { project_id, id });
998 cx.spawn(|this, mut cx| async move {
999 let buffer = request
1000 .await?
1001 .buffer
1002 .ok_or_else(|| anyhow!("invalid buffer"))?;
1003 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1004 .await
1005 })
1006 } else {
1007 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1008 }
1009 }
1010
1011 pub fn save_buffer_as(
1012 &mut self,
1013 buffer: ModelHandle<Buffer>,
1014 abs_path: PathBuf,
1015 cx: &mut ModelContext<Project>,
1016 ) -> Task<Result<()>> {
1017 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1018 cx.spawn(|this, mut cx| async move {
1019 let (worktree, path) = worktree_task.await?;
1020 worktree
1021 .update(&mut cx, |worktree, cx| {
1022 worktree
1023 .as_local_mut()
1024 .unwrap()
1025 .save_buffer_as(buffer.clone(), path, cx)
1026 })
1027 .await?;
1028 this.update(&mut cx, |this, cx| {
1029 this.assign_language_to_buffer(&buffer, cx);
1030 this.register_buffer_with_language_server(&buffer, cx);
1031 });
1032 Ok(())
1033 })
1034 }
1035
1036 pub fn get_open_buffer(
1037 &mut self,
1038 path: &ProjectPath,
1039 cx: &mut ModelContext<Self>,
1040 ) -> Option<ModelHandle<Buffer>> {
1041 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1042 self.opened_buffers.values().find_map(|buffer| {
1043 let buffer = buffer.upgrade(cx)?;
1044 let file = File::from_dyn(buffer.read(cx).file())?;
1045 if file.worktree == worktree && file.path() == &path.path {
1046 Some(buffer)
1047 } else {
1048 None
1049 }
1050 })
1051 }
1052
1053 fn register_buffer(
1054 &mut self,
1055 buffer: &ModelHandle<Buffer>,
1056 cx: &mut ModelContext<Self>,
1057 ) -> Result<()> {
1058 let remote_id = buffer.read(cx).remote_id();
1059 let open_buffer = if self.is_remote() || self.is_shared() {
1060 OpenBuffer::Strong(buffer.clone())
1061 } else {
1062 OpenBuffer::Weak(buffer.downgrade())
1063 };
1064
1065 match self.opened_buffers.insert(remote_id, open_buffer) {
1066 None => {}
1067 Some(OpenBuffer::Loading(operations)) => {
1068 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1069 }
1070 Some(OpenBuffer::Weak(existing_handle)) => {
1071 if existing_handle.upgrade(cx).is_some() {
1072 Err(anyhow!(
1073 "already registered buffer with remote id {}",
1074 remote_id
1075 ))?
1076 }
1077 }
1078 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1079 "already registered buffer with remote id {}",
1080 remote_id
1081 ))?,
1082 }
1083 cx.subscribe(buffer, |this, buffer, event, cx| {
1084 this.on_buffer_event(buffer, event, cx);
1085 })
1086 .detach();
1087
1088 self.assign_language_to_buffer(buffer, cx);
1089 self.register_buffer_with_language_server(buffer, cx);
1090
1091 Ok(())
1092 }
1093
1094 fn register_buffer_with_language_server(
1095 &mut self,
1096 buffer_handle: &ModelHandle<Buffer>,
1097 cx: &mut ModelContext<Self>,
1098 ) {
1099 let buffer = buffer_handle.read(cx);
1100 let buffer_id = buffer.remote_id();
1101 if let Some(file) = File::from_dyn(buffer.file()) {
1102 if file.is_local() {
1103 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1104 let initial_snapshot = buffer.text_snapshot();
1105 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1106
1107 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1108 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1109 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1110 .log_err();
1111 }
1112 }
1113
1114 if let Some(server) = language_server {
1115 server
1116 .notify::<lsp::notification::DidOpenTextDocument>(
1117 lsp::DidOpenTextDocumentParams {
1118 text_document: lsp::TextDocumentItem::new(
1119 uri,
1120 Default::default(),
1121 0,
1122 initial_snapshot.text(),
1123 ),
1124 }
1125 .clone(),
1126 )
1127 .log_err();
1128 buffer_handle.update(cx, |buffer, cx| {
1129 buffer.set_completion_triggers(
1130 server
1131 .capabilities()
1132 .completion_provider
1133 .as_ref()
1134 .and_then(|provider| provider.trigger_characters.clone())
1135 .unwrap_or(Vec::new()),
1136 cx,
1137 )
1138 });
1139 self.buffer_snapshots
1140 .insert(buffer_id, vec![(0, initial_snapshot)]);
1141 }
1142
1143 cx.observe_release(buffer_handle, |this, buffer, cx| {
1144 if let Some(file) = File::from_dyn(buffer.file()) {
1145 if file.is_local() {
1146 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1147 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1148 server
1149 .notify::<lsp::notification::DidCloseTextDocument>(
1150 lsp::DidCloseTextDocumentParams {
1151 text_document: lsp::TextDocumentIdentifier::new(
1152 uri.clone(),
1153 ),
1154 },
1155 )
1156 .log_err();
1157 }
1158 }
1159 }
1160 })
1161 .detach();
1162 }
1163 }
1164 }
1165
1166 fn on_buffer_event(
1167 &mut self,
1168 buffer: ModelHandle<Buffer>,
1169 event: &BufferEvent,
1170 cx: &mut ModelContext<Self>,
1171 ) -> Option<()> {
1172 match event {
1173 BufferEvent::Operation(operation) => {
1174 let project_id = self.remote_id()?;
1175 let request = self.client.request(proto::UpdateBuffer {
1176 project_id,
1177 buffer_id: buffer.read(cx).remote_id(),
1178 operations: vec![language::proto::serialize_operation(&operation)],
1179 });
1180 cx.background().spawn(request).detach_and_log_err(cx);
1181 }
1182 BufferEvent::Edited { .. } => {
1183 let language_server = self
1184 .language_server_for_buffer(buffer.read(cx), cx)?
1185 .clone();
1186 let buffer = buffer.read(cx);
1187 let file = File::from_dyn(buffer.file())?;
1188 let abs_path = file.as_local()?.abs_path(cx);
1189 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1190 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1191 let (version, prev_snapshot) = buffer_snapshots.last()?;
1192 let next_snapshot = buffer.text_snapshot();
1193 let next_version = version + 1;
1194
1195 let content_changes = buffer
1196 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1197 .map(|edit| {
1198 let edit_start = edit.new.start.0;
1199 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1200 let new_text = next_snapshot
1201 .text_for_range(edit.new.start.1..edit.new.end.1)
1202 .collect();
1203 lsp::TextDocumentContentChangeEvent {
1204 range: Some(lsp::Range::new(
1205 edit_start.to_lsp_position(),
1206 edit_end.to_lsp_position(),
1207 )),
1208 range_length: None,
1209 text: new_text,
1210 }
1211 })
1212 .collect();
1213
1214 buffer_snapshots.push((next_version, next_snapshot));
1215
1216 language_server
1217 .notify::<lsp::notification::DidChangeTextDocument>(
1218 lsp::DidChangeTextDocumentParams {
1219 text_document: lsp::VersionedTextDocumentIdentifier::new(
1220 uri,
1221 next_version,
1222 ),
1223 content_changes,
1224 },
1225 )
1226 .log_err();
1227 }
1228 BufferEvent::Saved => {
1229 let file = File::from_dyn(buffer.read(cx).file())?;
1230 let worktree_id = file.worktree_id(cx);
1231 let abs_path = file.as_local()?.abs_path(cx);
1232 let text_document = lsp::TextDocumentIdentifier {
1233 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1234 };
1235
1236 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1237 server
1238 .notify::<lsp::notification::DidSaveTextDocument>(
1239 lsp::DidSaveTextDocumentParams {
1240 text_document: text_document.clone(),
1241 text: None,
1242 },
1243 )
1244 .log_err();
1245 }
1246 }
1247 _ => {}
1248 }
1249
1250 None
1251 }
1252
1253 fn language_servers_for_worktree(
1254 &self,
1255 worktree_id: WorktreeId,
1256 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1257 self.language_servers.iter().filter_map(
1258 move |((language_server_worktree_id, language_name), server)| {
1259 if *language_server_worktree_id == worktree_id {
1260 Some((language_name.as_ref(), server))
1261 } else {
1262 None
1263 }
1264 },
1265 )
1266 }
1267
1268 fn assign_language_to_buffer(
1269 &mut self,
1270 buffer: &ModelHandle<Buffer>,
1271 cx: &mut ModelContext<Self>,
1272 ) -> Option<()> {
1273 // If the buffer has a language, set it and start the language server if we haven't already.
1274 let full_path = buffer.read(cx).file()?.full_path(cx);
1275 let language = self.languages.select_language(&full_path)?;
1276 buffer.update(cx, |buffer, cx| {
1277 buffer.set_language(Some(language.clone()), cx);
1278 });
1279
1280 let file = File::from_dyn(buffer.read(cx).file())?;
1281 let worktree = file.worktree.read(cx).as_local()?;
1282 let worktree_id = worktree.id();
1283 let worktree_abs_path = worktree.abs_path().clone();
1284 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1285
1286 None
1287 }
1288
1289 fn start_language_server(
1290 &mut self,
1291 worktree_id: WorktreeId,
1292 worktree_path: Arc<Path>,
1293 language: Arc<Language>,
1294 cx: &mut ModelContext<Self>,
1295 ) {
1296 let key = (worktree_id, language.name());
1297 self.started_language_servers
1298 .entry(key.clone())
1299 .or_insert_with(|| {
1300 let server_id = post_inc(&mut self.next_language_server_id);
1301 let language_server = self.languages.start_language_server(
1302 language.clone(),
1303 worktree_path,
1304 self.client.http_client(),
1305 cx,
1306 );
1307 cx.spawn_weak(|this, mut cx| async move {
1308 let mut language_server = language_server?.await.log_err()?;
1309 let this = this.upgrade(&cx)?;
1310 let (language_server_events_tx, language_server_events_rx) =
1311 smol::channel::unbounded();
1312
1313 language_server
1314 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1315 let language_server_events_tx = language_server_events_tx.clone();
1316 move |params| {
1317 language_server_events_tx
1318 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1319 .ok();
1320 }
1321 })
1322 .detach();
1323
1324 language_server
1325 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1326 let settings = this
1327 .read_with(&cx, |this, _| this.language_server_settings.clone());
1328 move |params| {
1329 let settings = settings.lock();
1330 Ok(params
1331 .items
1332 .into_iter()
1333 .map(|item| {
1334 if let Some(section) = &item.section {
1335 settings
1336 .get(section)
1337 .cloned()
1338 .unwrap_or(serde_json::Value::Null)
1339 } else {
1340 settings.clone()
1341 }
1342 })
1343 .collect())
1344 }
1345 })
1346 .detach();
1347
1348 language_server
1349 .on_notification::<lsp::notification::Progress, _>(move |params| {
1350 let token = match params.token {
1351 lsp::NumberOrString::String(token) => token,
1352 lsp::NumberOrString::Number(token) => {
1353 log::info!("skipping numeric progress token {}", token);
1354 return;
1355 }
1356 };
1357
1358 match params.value {
1359 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1360 lsp::WorkDoneProgress::Begin(_) => {
1361 language_server_events_tx
1362 .try_send(LanguageServerEvent::WorkStart { token })
1363 .ok();
1364 }
1365 lsp::WorkDoneProgress::Report(report) => {
1366 language_server_events_tx
1367 .try_send(LanguageServerEvent::WorkProgress {
1368 token,
1369 progress: LanguageServerProgress {
1370 message: report.message,
1371 percentage: report
1372 .percentage
1373 .map(|p| p as usize),
1374 last_update_at: Instant::now(),
1375 },
1376 })
1377 .ok();
1378 }
1379 lsp::WorkDoneProgress::End(_) => {
1380 language_server_events_tx
1381 .try_send(LanguageServerEvent::WorkEnd { token })
1382 .ok();
1383 }
1384 },
1385 }
1386 })
1387 .detach();
1388
1389 // Process all the LSP events.
1390 cx.spawn(|mut cx| {
1391 let this = this.downgrade();
1392 async move {
1393 while let Ok(event) = language_server_events_rx.recv().await {
1394 let this = this.upgrade(&cx)?;
1395 this.update(&mut cx, |this, cx| {
1396 this.on_lsp_event(server_id, event, &language, cx)
1397 });
1398
1399 // Don't starve the main thread when lots of events arrive all at once.
1400 smol::future::yield_now().await;
1401 }
1402 Some(())
1403 }
1404 })
1405 .detach();
1406
1407 let language_server = language_server.initialize().await.log_err()?;
1408 this.update(&mut cx, |this, cx| {
1409 this.language_servers
1410 .insert(key.clone(), language_server.clone());
1411 this.language_server_statuses.insert(
1412 server_id,
1413 LanguageServerStatus {
1414 name: language_server.name().to_string(),
1415 pending_work: Default::default(),
1416 pending_diagnostic_updates: 0,
1417 },
1418 );
1419 language_server
1420 .notify::<lsp::notification::DidChangeConfiguration>(
1421 lsp::DidChangeConfigurationParams {
1422 settings: this.language_server_settings.lock().clone(),
1423 },
1424 )
1425 .ok();
1426
1427 if let Some(project_id) = this.remote_id() {
1428 this.client
1429 .send(proto::StartLanguageServer {
1430 project_id,
1431 server: Some(proto::LanguageServer {
1432 id: server_id as u64,
1433 name: language_server.name().to_string(),
1434 }),
1435 })
1436 .log_err();
1437 }
1438
1439 // Tell the language server about every open buffer in the worktree that matches the language.
1440 for buffer in this.opened_buffers.values() {
1441 if let Some(buffer_handle) = buffer.upgrade(cx) {
1442 let buffer = buffer_handle.read(cx);
1443 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1444 file
1445 } else {
1446 continue;
1447 };
1448 let language = if let Some(language) = buffer.language() {
1449 language
1450 } else {
1451 continue;
1452 };
1453 if (file.worktree.read(cx).id(), language.name()) != key {
1454 continue;
1455 }
1456
1457 let file = file.as_local()?;
1458 let versions = this
1459 .buffer_snapshots
1460 .entry(buffer.remote_id())
1461 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1462 let (version, initial_snapshot) = versions.last().unwrap();
1463 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1464 language_server
1465 .notify::<lsp::notification::DidOpenTextDocument>(
1466 lsp::DidOpenTextDocumentParams {
1467 text_document: lsp::TextDocumentItem::new(
1468 uri,
1469 Default::default(),
1470 *version,
1471 initial_snapshot.text(),
1472 ),
1473 },
1474 )
1475 .log_err()?;
1476 buffer_handle.update(cx, |buffer, cx| {
1477 buffer.set_completion_triggers(
1478 language_server
1479 .capabilities()
1480 .completion_provider
1481 .as_ref()
1482 .and_then(|provider| {
1483 provider.trigger_characters.clone()
1484 })
1485 .unwrap_or(Vec::new()),
1486 cx,
1487 )
1488 });
1489 }
1490 }
1491
1492 cx.notify();
1493 Some(())
1494 });
1495
1496 Some(language_server)
1497 })
1498 });
1499 }
1500
1501 fn on_lsp_event(
1502 &mut self,
1503 language_server_id: usize,
1504 event: LanguageServerEvent,
1505 language: &Arc<Language>,
1506 cx: &mut ModelContext<Self>,
1507 ) {
1508 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1509 let language_server_status =
1510 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1511 status
1512 } else {
1513 return;
1514 };
1515
1516 match event {
1517 LanguageServerEvent::WorkStart { token } => {
1518 if Some(&token) == disk_diagnostics_token {
1519 language_server_status.pending_diagnostic_updates += 1;
1520 if language_server_status.pending_diagnostic_updates == 1 {
1521 self.disk_based_diagnostics_started(cx);
1522 self.broadcast_language_server_update(
1523 language_server_id,
1524 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1525 proto::LspDiskBasedDiagnosticsUpdating {},
1526 ),
1527 );
1528 }
1529 } else {
1530 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1531 self.broadcast_language_server_update(
1532 language_server_id,
1533 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1534 token,
1535 }),
1536 );
1537 }
1538 }
1539 LanguageServerEvent::WorkProgress { token, progress } => {
1540 if Some(&token) != disk_diagnostics_token {
1541 self.on_lsp_work_progress(
1542 language_server_id,
1543 token.clone(),
1544 progress.clone(),
1545 cx,
1546 );
1547 self.broadcast_language_server_update(
1548 language_server_id,
1549 proto::update_language_server::Variant::WorkProgress(
1550 proto::LspWorkProgress {
1551 token,
1552 message: progress.message,
1553 percentage: progress.percentage.map(|p| p as u32),
1554 },
1555 ),
1556 );
1557 }
1558 }
1559 LanguageServerEvent::WorkEnd { token } => {
1560 if Some(&token) == disk_diagnostics_token {
1561 language_server_status.pending_diagnostic_updates -= 1;
1562 if language_server_status.pending_diagnostic_updates == 0 {
1563 self.disk_based_diagnostics_finished(cx);
1564 self.broadcast_language_server_update(
1565 language_server_id,
1566 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1567 proto::LspDiskBasedDiagnosticsUpdated {},
1568 ),
1569 );
1570 }
1571 } else {
1572 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1573 self.broadcast_language_server_update(
1574 language_server_id,
1575 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1576 token,
1577 }),
1578 );
1579 }
1580 }
1581 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1582 language.process_diagnostics(&mut params);
1583
1584 if disk_diagnostics_token.is_none() {
1585 self.disk_based_diagnostics_started(cx);
1586 self.broadcast_language_server_update(
1587 language_server_id,
1588 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1589 proto::LspDiskBasedDiagnosticsUpdating {},
1590 ),
1591 );
1592 }
1593 self.update_diagnostics(
1594 params,
1595 language
1596 .disk_based_diagnostic_sources()
1597 .unwrap_or(&Default::default()),
1598 cx,
1599 )
1600 .log_err();
1601 if disk_diagnostics_token.is_none() {
1602 self.disk_based_diagnostics_finished(cx);
1603 self.broadcast_language_server_update(
1604 language_server_id,
1605 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1606 proto::LspDiskBasedDiagnosticsUpdated {},
1607 ),
1608 );
1609 }
1610 }
1611 }
1612 }
1613
1614 fn on_lsp_work_start(
1615 &mut self,
1616 language_server_id: usize,
1617 token: String,
1618 cx: &mut ModelContext<Self>,
1619 ) {
1620 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1621 status.pending_work.insert(
1622 token,
1623 LanguageServerProgress {
1624 message: None,
1625 percentage: None,
1626 last_update_at: Instant::now(),
1627 },
1628 );
1629 cx.notify();
1630 }
1631 }
1632
1633 fn on_lsp_work_progress(
1634 &mut self,
1635 language_server_id: usize,
1636 token: String,
1637 progress: LanguageServerProgress,
1638 cx: &mut ModelContext<Self>,
1639 ) {
1640 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1641 status.pending_work.insert(token, progress);
1642 cx.notify();
1643 }
1644 }
1645
1646 fn on_lsp_work_end(
1647 &mut self,
1648 language_server_id: usize,
1649 token: String,
1650 cx: &mut ModelContext<Self>,
1651 ) {
1652 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1653 status.pending_work.remove(&token);
1654 cx.notify();
1655 }
1656 }
1657
1658 fn broadcast_language_server_update(
1659 &self,
1660 language_server_id: usize,
1661 event: proto::update_language_server::Variant,
1662 ) {
1663 if let Some(project_id) = self.remote_id() {
1664 self.client
1665 .send(proto::UpdateLanguageServer {
1666 project_id,
1667 language_server_id: language_server_id as u64,
1668 variant: Some(event),
1669 })
1670 .log_err();
1671 }
1672 }
1673
1674 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1675 for server in self.language_servers.values() {
1676 server
1677 .notify::<lsp::notification::DidChangeConfiguration>(
1678 lsp::DidChangeConfigurationParams {
1679 settings: settings.clone(),
1680 },
1681 )
1682 .ok();
1683 }
1684 *self.language_server_settings.lock() = settings;
1685 }
1686
1687 pub fn language_server_statuses(
1688 &self,
1689 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1690 self.language_server_statuses.values()
1691 }
1692
1693 pub fn update_diagnostics(
1694 &mut self,
1695 params: lsp::PublishDiagnosticsParams,
1696 disk_based_sources: &HashSet<String>,
1697 cx: &mut ModelContext<Self>,
1698 ) -> Result<()> {
1699 let abs_path = params
1700 .uri
1701 .to_file_path()
1702 .map_err(|_| anyhow!("URI is not a file"))?;
1703 let mut next_group_id = 0;
1704 let mut diagnostics = Vec::default();
1705 let mut primary_diagnostic_group_ids = HashMap::default();
1706 let mut sources_by_group_id = HashMap::default();
1707 let mut supporting_diagnostics = HashMap::default();
1708 for diagnostic in ¶ms.diagnostics {
1709 let source = diagnostic.source.as_ref();
1710 let code = diagnostic.code.as_ref().map(|code| match code {
1711 lsp::NumberOrString::Number(code) => code.to_string(),
1712 lsp::NumberOrString::String(code) => code.clone(),
1713 });
1714 let range = range_from_lsp(diagnostic.range);
1715 let is_supporting = diagnostic
1716 .related_information
1717 .as_ref()
1718 .map_or(false, |infos| {
1719 infos.iter().any(|info| {
1720 primary_diagnostic_group_ids.contains_key(&(
1721 source,
1722 code.clone(),
1723 range_from_lsp(info.location.range),
1724 ))
1725 })
1726 });
1727
1728 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1729 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1730 });
1731
1732 if is_supporting {
1733 supporting_diagnostics.insert(
1734 (source, code.clone(), range),
1735 (diagnostic.severity, is_unnecessary),
1736 );
1737 } else {
1738 let group_id = post_inc(&mut next_group_id);
1739 let is_disk_based =
1740 source.map_or(false, |source| disk_based_sources.contains(source));
1741
1742 sources_by_group_id.insert(group_id, source);
1743 primary_diagnostic_group_ids
1744 .insert((source, code.clone(), range.clone()), group_id);
1745
1746 diagnostics.push(DiagnosticEntry {
1747 range,
1748 diagnostic: Diagnostic {
1749 code: code.clone(),
1750 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1751 message: diagnostic.message.clone(),
1752 group_id,
1753 is_primary: true,
1754 is_valid: true,
1755 is_disk_based,
1756 is_unnecessary,
1757 },
1758 });
1759 if let Some(infos) = &diagnostic.related_information {
1760 for info in infos {
1761 if info.location.uri == params.uri && !info.message.is_empty() {
1762 let range = range_from_lsp(info.location.range);
1763 diagnostics.push(DiagnosticEntry {
1764 range,
1765 diagnostic: Diagnostic {
1766 code: code.clone(),
1767 severity: DiagnosticSeverity::INFORMATION,
1768 message: info.message.clone(),
1769 group_id,
1770 is_primary: false,
1771 is_valid: true,
1772 is_disk_based,
1773 is_unnecessary: false,
1774 },
1775 });
1776 }
1777 }
1778 }
1779 }
1780 }
1781
1782 for entry in &mut diagnostics {
1783 let diagnostic = &mut entry.diagnostic;
1784 if !diagnostic.is_primary {
1785 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1786 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1787 source,
1788 diagnostic.code.clone(),
1789 entry.range.clone(),
1790 )) {
1791 if let Some(severity) = severity {
1792 diagnostic.severity = severity;
1793 }
1794 diagnostic.is_unnecessary = is_unnecessary;
1795 }
1796 }
1797 }
1798
1799 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1800 Ok(())
1801 }
1802
1803 pub fn update_diagnostic_entries(
1804 &mut self,
1805 abs_path: PathBuf,
1806 version: Option<i32>,
1807 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1808 cx: &mut ModelContext<Project>,
1809 ) -> Result<(), anyhow::Error> {
1810 let (worktree, relative_path) = self
1811 .find_local_worktree(&abs_path, cx)
1812 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1813 if !worktree.read(cx).is_visible() {
1814 return Ok(());
1815 }
1816
1817 let project_path = ProjectPath {
1818 worktree_id: worktree.read(cx).id(),
1819 path: relative_path.into(),
1820 };
1821
1822 for buffer in self.opened_buffers.values() {
1823 if let Some(buffer) = buffer.upgrade(cx) {
1824 if buffer
1825 .read(cx)
1826 .file()
1827 .map_or(false, |file| *file.path() == project_path.path)
1828 {
1829 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1830 break;
1831 }
1832 }
1833 }
1834 worktree.update(cx, |worktree, cx| {
1835 worktree
1836 .as_local_mut()
1837 .ok_or_else(|| anyhow!("not a local worktree"))?
1838 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1839 })?;
1840 cx.emit(Event::DiagnosticsUpdated(project_path));
1841 Ok(())
1842 }
1843
1844 fn update_buffer_diagnostics(
1845 &mut self,
1846 buffer: &ModelHandle<Buffer>,
1847 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1848 version: Option<i32>,
1849 cx: &mut ModelContext<Self>,
1850 ) -> Result<()> {
1851 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1852 Ordering::Equal
1853 .then_with(|| b.is_primary.cmp(&a.is_primary))
1854 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1855 .then_with(|| a.severity.cmp(&b.severity))
1856 .then_with(|| a.message.cmp(&b.message))
1857 }
1858
1859 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1860
1861 diagnostics.sort_unstable_by(|a, b| {
1862 Ordering::Equal
1863 .then_with(|| a.range.start.cmp(&b.range.start))
1864 .then_with(|| b.range.end.cmp(&a.range.end))
1865 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1866 });
1867
1868 let mut sanitized_diagnostics = Vec::new();
1869 let mut edits_since_save = snapshot
1870 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1871 .peekable();
1872 let mut last_edit_old_end = PointUtf16::zero();
1873 let mut last_edit_new_end = PointUtf16::zero();
1874 'outer: for entry in diagnostics {
1875 let mut start = entry.range.start;
1876 let mut end = entry.range.end;
1877
1878 // Some diagnostics are based on files on disk instead of buffers'
1879 // current contents. Adjust these diagnostics' ranges to reflect
1880 // any unsaved edits.
1881 if entry.diagnostic.is_disk_based {
1882 while let Some(edit) = edits_since_save.peek() {
1883 if edit.old.end <= start {
1884 last_edit_old_end = edit.old.end;
1885 last_edit_new_end = edit.new.end;
1886 edits_since_save.next();
1887 } else if edit.old.start <= end && edit.old.end >= start {
1888 continue 'outer;
1889 } else {
1890 break;
1891 }
1892 }
1893
1894 let start_overshoot = start - last_edit_old_end;
1895 start = last_edit_new_end;
1896 start += start_overshoot;
1897
1898 let end_overshoot = end - last_edit_old_end;
1899 end = last_edit_new_end;
1900 end += end_overshoot;
1901 }
1902
1903 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1904 ..snapshot.clip_point_utf16(end, Bias::Right);
1905
1906 // Expand empty ranges by one character
1907 if range.start == range.end {
1908 range.end.column += 1;
1909 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1910 if range.start == range.end && range.end.column > 0 {
1911 range.start.column -= 1;
1912 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1913 }
1914 }
1915
1916 sanitized_diagnostics.push(DiagnosticEntry {
1917 range,
1918 diagnostic: entry.diagnostic,
1919 });
1920 }
1921 drop(edits_since_save);
1922
1923 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1924 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1925 Ok(())
1926 }
1927
1928 pub fn format(
1929 &self,
1930 buffers: HashSet<ModelHandle<Buffer>>,
1931 push_to_history: bool,
1932 cx: &mut ModelContext<Project>,
1933 ) -> Task<Result<ProjectTransaction>> {
1934 let mut local_buffers = Vec::new();
1935 let mut remote_buffers = None;
1936 for buffer_handle in buffers {
1937 let buffer = buffer_handle.read(cx);
1938 if let Some(file) = File::from_dyn(buffer.file()) {
1939 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1940 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1941 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1942 }
1943 } else {
1944 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1945 }
1946 } else {
1947 return Task::ready(Ok(Default::default()));
1948 }
1949 }
1950
1951 let remote_buffers = self.remote_id().zip(remote_buffers);
1952 let client = self.client.clone();
1953
1954 cx.spawn(|this, mut cx| async move {
1955 let mut project_transaction = ProjectTransaction::default();
1956
1957 if let Some((project_id, remote_buffers)) = remote_buffers {
1958 let response = client
1959 .request(proto::FormatBuffers {
1960 project_id,
1961 buffer_ids: remote_buffers
1962 .iter()
1963 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1964 .collect(),
1965 })
1966 .await?
1967 .transaction
1968 .ok_or_else(|| anyhow!("missing transaction"))?;
1969 project_transaction = this
1970 .update(&mut cx, |this, cx| {
1971 this.deserialize_project_transaction(response, push_to_history, cx)
1972 })
1973 .await?;
1974 }
1975
1976 for (buffer, buffer_abs_path, language_server) in local_buffers {
1977 let text_document = lsp::TextDocumentIdentifier::new(
1978 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1979 );
1980 let capabilities = &language_server.capabilities();
1981 let lsp_edits = if capabilities
1982 .document_formatting_provider
1983 .as_ref()
1984 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1985 {
1986 language_server
1987 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1988 text_document,
1989 options: Default::default(),
1990 work_done_progress_params: Default::default(),
1991 })
1992 .await?
1993 } else if capabilities
1994 .document_range_formatting_provider
1995 .as_ref()
1996 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1997 {
1998 let buffer_start = lsp::Position::new(0, 0);
1999 let buffer_end = buffer
2000 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
2001 .to_lsp_position();
2002 language_server
2003 .request::<lsp::request::RangeFormatting>(
2004 lsp::DocumentRangeFormattingParams {
2005 text_document,
2006 range: lsp::Range::new(buffer_start, buffer_end),
2007 options: Default::default(),
2008 work_done_progress_params: Default::default(),
2009 },
2010 )
2011 .await?
2012 } else {
2013 continue;
2014 };
2015
2016 if let Some(lsp_edits) = lsp_edits {
2017 let edits = this
2018 .update(&mut cx, |this, cx| {
2019 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2020 })
2021 .await?;
2022 buffer.update(&mut cx, |buffer, cx| {
2023 buffer.finalize_last_transaction();
2024 buffer.start_transaction();
2025 for (range, text) in edits {
2026 buffer.edit([range], text, cx);
2027 }
2028 if buffer.end_transaction(cx).is_some() {
2029 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2030 if !push_to_history {
2031 buffer.forget_transaction(transaction.id);
2032 }
2033 project_transaction.0.insert(cx.handle(), transaction);
2034 }
2035 });
2036 }
2037 }
2038
2039 Ok(project_transaction)
2040 })
2041 }
2042
2043 pub fn definition<T: ToPointUtf16>(
2044 &self,
2045 buffer: &ModelHandle<Buffer>,
2046 position: T,
2047 cx: &mut ModelContext<Self>,
2048 ) -> Task<Result<Vec<Location>>> {
2049 let position = position.to_point_utf16(buffer.read(cx));
2050 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2051 }
2052
2053 pub fn references<T: ToPointUtf16>(
2054 &self,
2055 buffer: &ModelHandle<Buffer>,
2056 position: T,
2057 cx: &mut ModelContext<Self>,
2058 ) -> Task<Result<Vec<Location>>> {
2059 let position = position.to_point_utf16(buffer.read(cx));
2060 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2061 }
2062
2063 pub fn document_highlights<T: ToPointUtf16>(
2064 &self,
2065 buffer: &ModelHandle<Buffer>,
2066 position: T,
2067 cx: &mut ModelContext<Self>,
2068 ) -> Task<Result<Vec<DocumentHighlight>>> {
2069 let position = position.to_point_utf16(buffer.read(cx));
2070
2071 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2072 }
2073
2074 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2075 if self.is_local() {
2076 let mut language_servers = HashMap::default();
2077 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2078 if let Some((worktree, language)) = self
2079 .worktree_for_id(*worktree_id, cx)
2080 .and_then(|worktree| worktree.read(cx).as_local())
2081 .zip(self.languages.get_language(language_name))
2082 {
2083 language_servers
2084 .entry(Arc::as_ptr(language_server))
2085 .or_insert((
2086 language_server.clone(),
2087 *worktree_id,
2088 worktree.abs_path().clone(),
2089 language.clone(),
2090 ));
2091 }
2092 }
2093
2094 let mut requests = Vec::new();
2095 for (language_server, _, _, _) in language_servers.values() {
2096 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2097 lsp::WorkspaceSymbolParams {
2098 query: query.to_string(),
2099 ..Default::default()
2100 },
2101 ));
2102 }
2103
2104 cx.spawn_weak(|this, cx| async move {
2105 let responses = futures::future::try_join_all(requests).await?;
2106
2107 let mut symbols = Vec::new();
2108 if let Some(this) = this.upgrade(&cx) {
2109 this.read_with(&cx, |this, cx| {
2110 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2111 language_servers.into_values().zip(responses)
2112 {
2113 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2114 |lsp_symbol| {
2115 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2116 let mut worktree_id = source_worktree_id;
2117 let path;
2118 if let Some((worktree, rel_path)) =
2119 this.find_local_worktree(&abs_path, cx)
2120 {
2121 worktree_id = worktree.read(cx).id();
2122 path = rel_path;
2123 } else {
2124 path = relativize_path(&worktree_abs_path, &abs_path);
2125 }
2126
2127 let label = language
2128 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2129 .unwrap_or_else(|| {
2130 CodeLabel::plain(lsp_symbol.name.clone(), None)
2131 });
2132 let signature = this.symbol_signature(worktree_id, &path);
2133
2134 Some(Symbol {
2135 source_worktree_id,
2136 worktree_id,
2137 language_name: language.name().to_string(),
2138 name: lsp_symbol.name,
2139 kind: lsp_symbol.kind,
2140 label,
2141 path,
2142 range: range_from_lsp(lsp_symbol.location.range),
2143 signature,
2144 })
2145 },
2146 ));
2147 }
2148 })
2149 }
2150
2151 Ok(symbols)
2152 })
2153 } else if let Some(project_id) = self.remote_id() {
2154 let request = self.client.request(proto::GetProjectSymbols {
2155 project_id,
2156 query: query.to_string(),
2157 });
2158 cx.spawn_weak(|this, cx| async move {
2159 let response = request.await?;
2160 let mut symbols = Vec::new();
2161 if let Some(this) = this.upgrade(&cx) {
2162 this.read_with(&cx, |this, _| {
2163 symbols.extend(
2164 response
2165 .symbols
2166 .into_iter()
2167 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2168 );
2169 })
2170 }
2171 Ok(symbols)
2172 })
2173 } else {
2174 Task::ready(Ok(Default::default()))
2175 }
2176 }
2177
2178 pub fn open_buffer_for_symbol(
2179 &mut self,
2180 symbol: &Symbol,
2181 cx: &mut ModelContext<Self>,
2182 ) -> Task<Result<ModelHandle<Buffer>>> {
2183 if self.is_local() {
2184 let language_server = if let Some(server) = self.language_servers.get(&(
2185 symbol.source_worktree_id,
2186 Arc::from(symbol.language_name.as_str()),
2187 )) {
2188 server.clone()
2189 } else {
2190 return Task::ready(Err(anyhow!(
2191 "language server for worktree and language not found"
2192 )));
2193 };
2194
2195 let worktree_abs_path = if let Some(worktree_abs_path) = self
2196 .worktree_for_id(symbol.worktree_id, cx)
2197 .and_then(|worktree| worktree.read(cx).as_local())
2198 .map(|local_worktree| local_worktree.abs_path())
2199 {
2200 worktree_abs_path
2201 } else {
2202 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2203 };
2204 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2205 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2206 uri
2207 } else {
2208 return Task::ready(Err(anyhow!("invalid symbol path")));
2209 };
2210
2211 self.open_local_buffer_via_lsp(
2212 symbol_uri,
2213 Arc::from(symbol.language_name.as_str()),
2214 language_server,
2215 cx,
2216 )
2217 } else if let Some(project_id) = self.remote_id() {
2218 let request = self.client.request(proto::OpenBufferForSymbol {
2219 project_id,
2220 symbol: Some(serialize_symbol(symbol)),
2221 });
2222 cx.spawn(|this, mut cx| async move {
2223 let response = request.await?;
2224 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2225 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2226 .await
2227 })
2228 } else {
2229 Task::ready(Err(anyhow!("project does not have a remote id")))
2230 }
2231 }
2232
2233 pub fn completions<T: ToPointUtf16>(
2234 &self,
2235 source_buffer_handle: &ModelHandle<Buffer>,
2236 position: T,
2237 cx: &mut ModelContext<Self>,
2238 ) -> Task<Result<Vec<Completion>>> {
2239 let source_buffer_handle = source_buffer_handle.clone();
2240 let source_buffer = source_buffer_handle.read(cx);
2241 let buffer_id = source_buffer.remote_id();
2242 let language = source_buffer.language().cloned();
2243 let worktree;
2244 let buffer_abs_path;
2245 if let Some(file) = File::from_dyn(source_buffer.file()) {
2246 worktree = file.worktree.clone();
2247 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2248 } else {
2249 return Task::ready(Ok(Default::default()));
2250 };
2251
2252 let position = position.to_point_utf16(source_buffer);
2253 let anchor = source_buffer.anchor_after(position);
2254
2255 if worktree.read(cx).as_local().is_some() {
2256 let buffer_abs_path = buffer_abs_path.unwrap();
2257 let lang_server =
2258 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2259 server.clone()
2260 } else {
2261 return Task::ready(Ok(Default::default()));
2262 };
2263
2264 cx.spawn(|_, cx| async move {
2265 let completions = lang_server
2266 .request::<lsp::request::Completion>(lsp::CompletionParams {
2267 text_document_position: lsp::TextDocumentPositionParams::new(
2268 lsp::TextDocumentIdentifier::new(
2269 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2270 ),
2271 position.to_lsp_position(),
2272 ),
2273 context: Default::default(),
2274 work_done_progress_params: Default::default(),
2275 partial_result_params: Default::default(),
2276 })
2277 .await
2278 .context("lsp completion request failed")?;
2279
2280 let completions = if let Some(completions) = completions {
2281 match completions {
2282 lsp::CompletionResponse::Array(completions) => completions,
2283 lsp::CompletionResponse::List(list) => list.items,
2284 }
2285 } else {
2286 Default::default()
2287 };
2288
2289 source_buffer_handle.read_with(&cx, |this, _| {
2290 Ok(completions
2291 .into_iter()
2292 .filter_map(|lsp_completion| {
2293 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2294 lsp::CompletionTextEdit::Edit(edit) => {
2295 (range_from_lsp(edit.range), edit.new_text.clone())
2296 }
2297 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2298 log::info!("unsupported insert/replace completion");
2299 return None;
2300 }
2301 };
2302
2303 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2304 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2305 if clipped_start == old_range.start && clipped_end == old_range.end {
2306 Some(Completion {
2307 old_range: this.anchor_before(old_range.start)
2308 ..this.anchor_after(old_range.end),
2309 new_text,
2310 label: language
2311 .as_ref()
2312 .and_then(|l| l.label_for_completion(&lsp_completion))
2313 .unwrap_or_else(|| {
2314 CodeLabel::plain(
2315 lsp_completion.label.clone(),
2316 lsp_completion.filter_text.as_deref(),
2317 )
2318 }),
2319 lsp_completion,
2320 })
2321 } else {
2322 None
2323 }
2324 })
2325 .collect())
2326 })
2327 })
2328 } else if let Some(project_id) = self.remote_id() {
2329 let rpc = self.client.clone();
2330 let message = proto::GetCompletions {
2331 project_id,
2332 buffer_id,
2333 position: Some(language::proto::serialize_anchor(&anchor)),
2334 version: serialize_version(&source_buffer.version()),
2335 };
2336 cx.spawn_weak(|_, mut cx| async move {
2337 let response = rpc.request(message).await?;
2338
2339 source_buffer_handle
2340 .update(&mut cx, |buffer, _| {
2341 buffer.wait_for_version(deserialize_version(response.version))
2342 })
2343 .await;
2344
2345 response
2346 .completions
2347 .into_iter()
2348 .map(|completion| {
2349 language::proto::deserialize_completion(completion, language.as_ref())
2350 })
2351 .collect()
2352 })
2353 } else {
2354 Task::ready(Ok(Default::default()))
2355 }
2356 }
2357
2358 pub fn apply_additional_edits_for_completion(
2359 &self,
2360 buffer_handle: ModelHandle<Buffer>,
2361 completion: Completion,
2362 push_to_history: bool,
2363 cx: &mut ModelContext<Self>,
2364 ) -> Task<Result<Option<Transaction>>> {
2365 let buffer = buffer_handle.read(cx);
2366 let buffer_id = buffer.remote_id();
2367
2368 if self.is_local() {
2369 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2370 server.clone()
2371 } else {
2372 return Task::ready(Ok(Default::default()));
2373 };
2374
2375 cx.spawn(|this, mut cx| async move {
2376 let resolved_completion = lang_server
2377 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2378 .await?;
2379 if let Some(edits) = resolved_completion.additional_text_edits {
2380 let edits = this
2381 .update(&mut cx, |this, cx| {
2382 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2383 })
2384 .await?;
2385 buffer_handle.update(&mut cx, |buffer, cx| {
2386 buffer.finalize_last_transaction();
2387 buffer.start_transaction();
2388 for (range, text) in edits {
2389 buffer.edit([range], text, cx);
2390 }
2391 let transaction = if buffer.end_transaction(cx).is_some() {
2392 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2393 if !push_to_history {
2394 buffer.forget_transaction(transaction.id);
2395 }
2396 Some(transaction)
2397 } else {
2398 None
2399 };
2400 Ok(transaction)
2401 })
2402 } else {
2403 Ok(None)
2404 }
2405 })
2406 } else if let Some(project_id) = self.remote_id() {
2407 let client = self.client.clone();
2408 cx.spawn(|_, mut cx| async move {
2409 let response = client
2410 .request(proto::ApplyCompletionAdditionalEdits {
2411 project_id,
2412 buffer_id,
2413 completion: Some(language::proto::serialize_completion(&completion)),
2414 })
2415 .await?;
2416
2417 if let Some(transaction) = response.transaction {
2418 let transaction = language::proto::deserialize_transaction(transaction)?;
2419 buffer_handle
2420 .update(&mut cx, |buffer, _| {
2421 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2422 })
2423 .await;
2424 if push_to_history {
2425 buffer_handle.update(&mut cx, |buffer, _| {
2426 buffer.push_transaction(transaction.clone(), Instant::now());
2427 });
2428 }
2429 Ok(Some(transaction))
2430 } else {
2431 Ok(None)
2432 }
2433 })
2434 } else {
2435 Task::ready(Err(anyhow!("project does not have a remote id")))
2436 }
2437 }
2438
2439 pub fn code_actions<T: ToOffset>(
2440 &self,
2441 buffer_handle: &ModelHandle<Buffer>,
2442 range: Range<T>,
2443 cx: &mut ModelContext<Self>,
2444 ) -> Task<Result<Vec<CodeAction>>> {
2445 let buffer_handle = buffer_handle.clone();
2446 let buffer = buffer_handle.read(cx);
2447 let buffer_id = buffer.remote_id();
2448 let worktree;
2449 let buffer_abs_path;
2450 if let Some(file) = File::from_dyn(buffer.file()) {
2451 worktree = file.worktree.clone();
2452 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2453 } else {
2454 return Task::ready(Ok(Default::default()));
2455 };
2456 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2457
2458 if worktree.read(cx).as_local().is_some() {
2459 let buffer_abs_path = buffer_abs_path.unwrap();
2460 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2461 server.clone()
2462 } else {
2463 return Task::ready(Ok(Default::default()));
2464 };
2465
2466 let lsp_range = lsp::Range::new(
2467 range.start.to_point_utf16(buffer).to_lsp_position(),
2468 range.end.to_point_utf16(buffer).to_lsp_position(),
2469 );
2470 cx.foreground().spawn(async move {
2471 if !lang_server.capabilities().code_action_provider.is_some() {
2472 return Ok(Default::default());
2473 }
2474
2475 Ok(lang_server
2476 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2477 text_document: lsp::TextDocumentIdentifier::new(
2478 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2479 ),
2480 range: lsp_range,
2481 work_done_progress_params: Default::default(),
2482 partial_result_params: Default::default(),
2483 context: lsp::CodeActionContext {
2484 diagnostics: Default::default(),
2485 only: Some(vec![
2486 lsp::CodeActionKind::QUICKFIX,
2487 lsp::CodeActionKind::REFACTOR,
2488 lsp::CodeActionKind::REFACTOR_EXTRACT,
2489 ]),
2490 },
2491 })
2492 .await?
2493 .unwrap_or_default()
2494 .into_iter()
2495 .filter_map(|entry| {
2496 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2497 Some(CodeAction {
2498 range: range.clone(),
2499 lsp_action,
2500 })
2501 } else {
2502 None
2503 }
2504 })
2505 .collect())
2506 })
2507 } else if let Some(project_id) = self.remote_id() {
2508 let rpc = self.client.clone();
2509 let version = buffer.version();
2510 cx.spawn_weak(|_, mut cx| async move {
2511 let response = rpc
2512 .request(proto::GetCodeActions {
2513 project_id,
2514 buffer_id,
2515 start: Some(language::proto::serialize_anchor(&range.start)),
2516 end: Some(language::proto::serialize_anchor(&range.end)),
2517 version: serialize_version(&version),
2518 })
2519 .await?;
2520
2521 buffer_handle
2522 .update(&mut cx, |buffer, _| {
2523 buffer.wait_for_version(deserialize_version(response.version))
2524 })
2525 .await;
2526
2527 response
2528 .actions
2529 .into_iter()
2530 .map(language::proto::deserialize_code_action)
2531 .collect()
2532 })
2533 } else {
2534 Task::ready(Ok(Default::default()))
2535 }
2536 }
2537
2538 pub fn apply_code_action(
2539 &self,
2540 buffer_handle: ModelHandle<Buffer>,
2541 mut action: CodeAction,
2542 push_to_history: bool,
2543 cx: &mut ModelContext<Self>,
2544 ) -> Task<Result<ProjectTransaction>> {
2545 if self.is_local() {
2546 let buffer = buffer_handle.read(cx);
2547 let lang_name = if let Some(lang) = buffer.language() {
2548 lang.name()
2549 } else {
2550 return Task::ready(Ok(Default::default()));
2551 };
2552 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2553 server.clone()
2554 } else {
2555 return Task::ready(Ok(Default::default()));
2556 };
2557 let range = action.range.to_point_utf16(buffer);
2558
2559 cx.spawn(|this, mut cx| async move {
2560 if let Some(lsp_range) = action
2561 .lsp_action
2562 .data
2563 .as_mut()
2564 .and_then(|d| d.get_mut("codeActionParams"))
2565 .and_then(|d| d.get_mut("range"))
2566 {
2567 *lsp_range = serde_json::to_value(&lsp::Range::new(
2568 range.start.to_lsp_position(),
2569 range.end.to_lsp_position(),
2570 ))
2571 .unwrap();
2572 action.lsp_action = lang_server
2573 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2574 .await?;
2575 } else {
2576 let actions = this
2577 .update(&mut cx, |this, cx| {
2578 this.code_actions(&buffer_handle, action.range, cx)
2579 })
2580 .await?;
2581 action.lsp_action = actions
2582 .into_iter()
2583 .find(|a| a.lsp_action.title == action.lsp_action.title)
2584 .ok_or_else(|| anyhow!("code action is outdated"))?
2585 .lsp_action;
2586 }
2587
2588 if let Some(edit) = action.lsp_action.edit {
2589 Self::deserialize_workspace_edit(
2590 this,
2591 edit,
2592 push_to_history,
2593 lang_name,
2594 lang_server,
2595 &mut cx,
2596 )
2597 .await
2598 } else {
2599 Ok(ProjectTransaction::default())
2600 }
2601 })
2602 } else if let Some(project_id) = self.remote_id() {
2603 let client = self.client.clone();
2604 let request = proto::ApplyCodeAction {
2605 project_id,
2606 buffer_id: buffer_handle.read(cx).remote_id(),
2607 action: Some(language::proto::serialize_code_action(&action)),
2608 };
2609 cx.spawn(|this, mut cx| async move {
2610 let response = client
2611 .request(request)
2612 .await?
2613 .transaction
2614 .ok_or_else(|| anyhow!("missing transaction"))?;
2615 this.update(&mut cx, |this, cx| {
2616 this.deserialize_project_transaction(response, push_to_history, cx)
2617 })
2618 .await
2619 })
2620 } else {
2621 Task::ready(Err(anyhow!("project does not have a remote id")))
2622 }
2623 }
2624
2625 async fn deserialize_workspace_edit(
2626 this: ModelHandle<Self>,
2627 edit: lsp::WorkspaceEdit,
2628 push_to_history: bool,
2629 language_name: Arc<str>,
2630 language_server: Arc<LanguageServer>,
2631 cx: &mut AsyncAppContext,
2632 ) -> Result<ProjectTransaction> {
2633 let fs = this.read_with(cx, |this, _| this.fs.clone());
2634 let mut operations = Vec::new();
2635 if let Some(document_changes) = edit.document_changes {
2636 match document_changes {
2637 lsp::DocumentChanges::Edits(edits) => {
2638 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2639 }
2640 lsp::DocumentChanges::Operations(ops) => operations = ops,
2641 }
2642 } else if let Some(changes) = edit.changes {
2643 operations.extend(changes.into_iter().map(|(uri, edits)| {
2644 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2645 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2646 uri,
2647 version: None,
2648 },
2649 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2650 })
2651 }));
2652 }
2653
2654 let mut project_transaction = ProjectTransaction::default();
2655 for operation in operations {
2656 match operation {
2657 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2658 let abs_path = op
2659 .uri
2660 .to_file_path()
2661 .map_err(|_| anyhow!("can't convert URI to path"))?;
2662
2663 if let Some(parent_path) = abs_path.parent() {
2664 fs.create_dir(parent_path).await?;
2665 }
2666 if abs_path.ends_with("/") {
2667 fs.create_dir(&abs_path).await?;
2668 } else {
2669 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2670 .await?;
2671 }
2672 }
2673 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2674 let source_abs_path = op
2675 .old_uri
2676 .to_file_path()
2677 .map_err(|_| anyhow!("can't convert URI to path"))?;
2678 let target_abs_path = op
2679 .new_uri
2680 .to_file_path()
2681 .map_err(|_| anyhow!("can't convert URI to path"))?;
2682 fs.rename(
2683 &source_abs_path,
2684 &target_abs_path,
2685 op.options.map(Into::into).unwrap_or_default(),
2686 )
2687 .await?;
2688 }
2689 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2690 let abs_path = op
2691 .uri
2692 .to_file_path()
2693 .map_err(|_| anyhow!("can't convert URI to path"))?;
2694 let options = op.options.map(Into::into).unwrap_or_default();
2695 if abs_path.ends_with("/") {
2696 fs.remove_dir(&abs_path, options).await?;
2697 } else {
2698 fs.remove_file(&abs_path, options).await?;
2699 }
2700 }
2701 lsp::DocumentChangeOperation::Edit(op) => {
2702 let buffer_to_edit = this
2703 .update(cx, |this, cx| {
2704 this.open_local_buffer_via_lsp(
2705 op.text_document.uri,
2706 language_name.clone(),
2707 language_server.clone(),
2708 cx,
2709 )
2710 })
2711 .await?;
2712
2713 let edits = this
2714 .update(cx, |this, cx| {
2715 let edits = op.edits.into_iter().map(|edit| match edit {
2716 lsp::OneOf::Left(edit) => edit,
2717 lsp::OneOf::Right(edit) => edit.text_edit,
2718 });
2719 this.edits_from_lsp(
2720 &buffer_to_edit,
2721 edits,
2722 op.text_document.version,
2723 cx,
2724 )
2725 })
2726 .await?;
2727
2728 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2729 buffer.finalize_last_transaction();
2730 buffer.start_transaction();
2731 for (range, text) in edits {
2732 buffer.edit([range], text, cx);
2733 }
2734 let transaction = if buffer.end_transaction(cx).is_some() {
2735 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2736 if !push_to_history {
2737 buffer.forget_transaction(transaction.id);
2738 }
2739 Some(transaction)
2740 } else {
2741 None
2742 };
2743
2744 transaction
2745 });
2746 if let Some(transaction) = transaction {
2747 project_transaction.0.insert(buffer_to_edit, transaction);
2748 }
2749 }
2750 }
2751 }
2752
2753 Ok(project_transaction)
2754 }
2755
2756 pub fn prepare_rename<T: ToPointUtf16>(
2757 &self,
2758 buffer: ModelHandle<Buffer>,
2759 position: T,
2760 cx: &mut ModelContext<Self>,
2761 ) -> Task<Result<Option<Range<Anchor>>>> {
2762 let position = position.to_point_utf16(buffer.read(cx));
2763 self.request_lsp(buffer, PrepareRename { position }, cx)
2764 }
2765
2766 pub fn perform_rename<T: ToPointUtf16>(
2767 &self,
2768 buffer: ModelHandle<Buffer>,
2769 position: T,
2770 new_name: String,
2771 push_to_history: bool,
2772 cx: &mut ModelContext<Self>,
2773 ) -> Task<Result<ProjectTransaction>> {
2774 let position = position.to_point_utf16(buffer.read(cx));
2775 self.request_lsp(
2776 buffer,
2777 PerformRename {
2778 position,
2779 new_name,
2780 push_to_history,
2781 },
2782 cx,
2783 )
2784 }
2785
2786 pub fn search(
2787 &self,
2788 query: SearchQuery,
2789 cx: &mut ModelContext<Self>,
2790 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2791 if self.is_local() {
2792 let snapshots = self
2793 .visible_worktrees(cx)
2794 .filter_map(|tree| {
2795 let tree = tree.read(cx).as_local()?;
2796 Some(tree.snapshot())
2797 })
2798 .collect::<Vec<_>>();
2799
2800 let background = cx.background().clone();
2801 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2802 if path_count == 0 {
2803 return Task::ready(Ok(Default::default()));
2804 }
2805 let workers = background.num_cpus().min(path_count);
2806 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2807 cx.background()
2808 .spawn({
2809 let fs = self.fs.clone();
2810 let background = cx.background().clone();
2811 let query = query.clone();
2812 async move {
2813 let fs = &fs;
2814 let query = &query;
2815 let matching_paths_tx = &matching_paths_tx;
2816 let paths_per_worker = (path_count + workers - 1) / workers;
2817 let snapshots = &snapshots;
2818 background
2819 .scoped(|scope| {
2820 for worker_ix in 0..workers {
2821 let worker_start_ix = worker_ix * paths_per_worker;
2822 let worker_end_ix = worker_start_ix + paths_per_worker;
2823 scope.spawn(async move {
2824 let mut snapshot_start_ix = 0;
2825 let mut abs_path = PathBuf::new();
2826 for snapshot in snapshots {
2827 let snapshot_end_ix =
2828 snapshot_start_ix + snapshot.visible_file_count();
2829 if worker_end_ix <= snapshot_start_ix {
2830 break;
2831 } else if worker_start_ix > snapshot_end_ix {
2832 snapshot_start_ix = snapshot_end_ix;
2833 continue;
2834 } else {
2835 let start_in_snapshot = worker_start_ix
2836 .saturating_sub(snapshot_start_ix);
2837 let end_in_snapshot =
2838 cmp::min(worker_end_ix, snapshot_end_ix)
2839 - snapshot_start_ix;
2840
2841 for entry in snapshot
2842 .files(false, start_in_snapshot)
2843 .take(end_in_snapshot - start_in_snapshot)
2844 {
2845 if matching_paths_tx.is_closed() {
2846 break;
2847 }
2848
2849 abs_path.clear();
2850 abs_path.push(&snapshot.abs_path());
2851 abs_path.push(&entry.path);
2852 let matches = if let Some(file) =
2853 fs.open_sync(&abs_path).await.log_err()
2854 {
2855 query.detect(file).unwrap_or(false)
2856 } else {
2857 false
2858 };
2859
2860 if matches {
2861 let project_path =
2862 (snapshot.id(), entry.path.clone());
2863 if matching_paths_tx
2864 .send(project_path)
2865 .await
2866 .is_err()
2867 {
2868 break;
2869 }
2870 }
2871 }
2872
2873 snapshot_start_ix = snapshot_end_ix;
2874 }
2875 }
2876 });
2877 }
2878 })
2879 .await;
2880 }
2881 })
2882 .detach();
2883
2884 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2885 let open_buffers = self
2886 .opened_buffers
2887 .values()
2888 .filter_map(|b| b.upgrade(cx))
2889 .collect::<HashSet<_>>();
2890 cx.spawn(|this, cx| async move {
2891 for buffer in &open_buffers {
2892 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2893 buffers_tx.send((buffer.clone(), snapshot)).await?;
2894 }
2895
2896 let open_buffers = Rc::new(RefCell::new(open_buffers));
2897 while let Some(project_path) = matching_paths_rx.next().await {
2898 if buffers_tx.is_closed() {
2899 break;
2900 }
2901
2902 let this = this.clone();
2903 let open_buffers = open_buffers.clone();
2904 let buffers_tx = buffers_tx.clone();
2905 cx.spawn(|mut cx| async move {
2906 if let Some(buffer) = this
2907 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2908 .await
2909 .log_err()
2910 {
2911 if open_buffers.borrow_mut().insert(buffer.clone()) {
2912 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2913 buffers_tx.send((buffer, snapshot)).await?;
2914 }
2915 }
2916
2917 Ok::<_, anyhow::Error>(())
2918 })
2919 .detach();
2920 }
2921
2922 Ok::<_, anyhow::Error>(())
2923 })
2924 .detach_and_log_err(cx);
2925
2926 let background = cx.background().clone();
2927 cx.background().spawn(async move {
2928 let query = &query;
2929 let mut matched_buffers = Vec::new();
2930 for _ in 0..workers {
2931 matched_buffers.push(HashMap::default());
2932 }
2933 background
2934 .scoped(|scope| {
2935 for worker_matched_buffers in matched_buffers.iter_mut() {
2936 let mut buffers_rx = buffers_rx.clone();
2937 scope.spawn(async move {
2938 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2939 let buffer_matches = query
2940 .search(snapshot.as_rope())
2941 .await
2942 .iter()
2943 .map(|range| {
2944 snapshot.anchor_before(range.start)
2945 ..snapshot.anchor_after(range.end)
2946 })
2947 .collect::<Vec<_>>();
2948 if !buffer_matches.is_empty() {
2949 worker_matched_buffers
2950 .insert(buffer.clone(), buffer_matches);
2951 }
2952 }
2953 });
2954 }
2955 })
2956 .await;
2957 Ok(matched_buffers.into_iter().flatten().collect())
2958 })
2959 } else if let Some(project_id) = self.remote_id() {
2960 let request = self.client.request(query.to_proto(project_id));
2961 cx.spawn(|this, mut cx| async move {
2962 let response = request.await?;
2963 let mut result = HashMap::default();
2964 for location in response.locations {
2965 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2966 let target_buffer = this
2967 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2968 .await?;
2969 let start = location
2970 .start
2971 .and_then(deserialize_anchor)
2972 .ok_or_else(|| anyhow!("missing target start"))?;
2973 let end = location
2974 .end
2975 .and_then(deserialize_anchor)
2976 .ok_or_else(|| anyhow!("missing target end"))?;
2977 result
2978 .entry(target_buffer)
2979 .or_insert(Vec::new())
2980 .push(start..end)
2981 }
2982 Ok(result)
2983 })
2984 } else {
2985 Task::ready(Ok(Default::default()))
2986 }
2987 }
2988
2989 fn request_lsp<R: LspCommand>(
2990 &self,
2991 buffer_handle: ModelHandle<Buffer>,
2992 request: R,
2993 cx: &mut ModelContext<Self>,
2994 ) -> Task<Result<R::Response>>
2995 where
2996 <R::LspRequest as lsp::request::Request>::Result: Send,
2997 {
2998 let buffer = buffer_handle.read(cx);
2999 if self.is_local() {
3000 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3001 if let Some((file, language_server)) =
3002 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3003 {
3004 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3005 return cx.spawn(|this, cx| async move {
3006 if !request.check_capabilities(&language_server.capabilities()) {
3007 return Ok(Default::default());
3008 }
3009
3010 let response = language_server
3011 .request::<R::LspRequest>(lsp_params)
3012 .await
3013 .context("lsp request failed")?;
3014 request
3015 .response_from_lsp(response, this, buffer_handle, cx)
3016 .await
3017 });
3018 }
3019 } else if let Some(project_id) = self.remote_id() {
3020 let rpc = self.client.clone();
3021 let message = request.to_proto(project_id, buffer);
3022 return cx.spawn(|this, cx| async move {
3023 let response = rpc.request(message).await?;
3024 request
3025 .response_from_proto(response, this, buffer_handle, cx)
3026 .await
3027 });
3028 }
3029 Task::ready(Ok(Default::default()))
3030 }
3031
3032 pub fn find_or_create_local_worktree(
3033 &mut self,
3034 abs_path: impl AsRef<Path>,
3035 visible: bool,
3036 cx: &mut ModelContext<Self>,
3037 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3038 let abs_path = abs_path.as_ref();
3039 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3040 Task::ready(Ok((tree.clone(), relative_path.into())))
3041 } else {
3042 let worktree = self.create_local_worktree(abs_path, visible, cx);
3043 cx.foreground()
3044 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3045 }
3046 }
3047
3048 pub fn find_local_worktree(
3049 &self,
3050 abs_path: &Path,
3051 cx: &AppContext,
3052 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3053 for tree in self.worktrees(cx) {
3054 if let Some(relative_path) = tree
3055 .read(cx)
3056 .as_local()
3057 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3058 {
3059 return Some((tree.clone(), relative_path.into()));
3060 }
3061 }
3062 None
3063 }
3064
3065 pub fn is_shared(&self) -> bool {
3066 match &self.client_state {
3067 ProjectClientState::Local { is_shared, .. } => *is_shared,
3068 ProjectClientState::Remote { .. } => false,
3069 }
3070 }
3071
3072 fn create_local_worktree(
3073 &mut self,
3074 abs_path: impl AsRef<Path>,
3075 visible: bool,
3076 cx: &mut ModelContext<Self>,
3077 ) -> Task<Result<ModelHandle<Worktree>>> {
3078 let fs = self.fs.clone();
3079 let client = self.client.clone();
3080 let next_entry_id = self.next_entry_id.clone();
3081 let path: Arc<Path> = abs_path.as_ref().into();
3082 let task = self
3083 .loading_local_worktrees
3084 .entry(path.clone())
3085 .or_insert_with(|| {
3086 cx.spawn(|project, mut cx| {
3087 async move {
3088 let worktree = Worktree::local(
3089 client.clone(),
3090 path.clone(),
3091 visible,
3092 fs,
3093 next_entry_id,
3094 &mut cx,
3095 )
3096 .await;
3097 project.update(&mut cx, |project, _| {
3098 project.loading_local_worktrees.remove(&path);
3099 });
3100 let worktree = worktree?;
3101
3102 let (remote_project_id, is_shared) =
3103 project.update(&mut cx, |project, cx| {
3104 project.add_worktree(&worktree, cx);
3105 (project.remote_id(), project.is_shared())
3106 });
3107
3108 if let Some(project_id) = remote_project_id {
3109 if is_shared {
3110 worktree
3111 .update(&mut cx, |worktree, cx| {
3112 worktree.as_local_mut().unwrap().share(project_id, cx)
3113 })
3114 .await?;
3115 } else {
3116 worktree
3117 .update(&mut cx, |worktree, cx| {
3118 worktree.as_local_mut().unwrap().register(project_id, cx)
3119 })
3120 .await?;
3121 }
3122 }
3123
3124 Ok(worktree)
3125 }
3126 .map_err(|err| Arc::new(err))
3127 })
3128 .shared()
3129 })
3130 .clone();
3131 cx.foreground().spawn(async move {
3132 match task.await {
3133 Ok(worktree) => Ok(worktree),
3134 Err(err) => Err(anyhow!("{}", err)),
3135 }
3136 })
3137 }
3138
3139 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3140 self.worktrees.retain(|worktree| {
3141 worktree
3142 .upgrade(cx)
3143 .map_or(false, |w| w.read(cx).id() != id)
3144 });
3145 cx.notify();
3146 }
3147
3148 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3149 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3150 if worktree.read(cx).is_local() {
3151 cx.subscribe(&worktree, |this, worktree, _, cx| {
3152 this.update_local_worktree_buffers(worktree, cx);
3153 })
3154 .detach();
3155 }
3156
3157 let push_strong_handle = {
3158 let worktree = worktree.read(cx);
3159 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3160 };
3161 if push_strong_handle {
3162 self.worktrees
3163 .push(WorktreeHandle::Strong(worktree.clone()));
3164 } else {
3165 cx.observe_release(&worktree, |this, _, cx| {
3166 this.worktrees
3167 .retain(|worktree| worktree.upgrade(cx).is_some());
3168 cx.notify();
3169 })
3170 .detach();
3171 self.worktrees
3172 .push(WorktreeHandle::Weak(worktree.downgrade()));
3173 }
3174 cx.notify();
3175 }
3176
3177 fn update_local_worktree_buffers(
3178 &mut self,
3179 worktree_handle: ModelHandle<Worktree>,
3180 cx: &mut ModelContext<Self>,
3181 ) {
3182 let snapshot = worktree_handle.read(cx).snapshot();
3183 let mut buffers_to_delete = Vec::new();
3184 for (buffer_id, buffer) in &self.opened_buffers {
3185 if let Some(buffer) = buffer.upgrade(cx) {
3186 buffer.update(cx, |buffer, cx| {
3187 if let Some(old_file) = File::from_dyn(buffer.file()) {
3188 if old_file.worktree != worktree_handle {
3189 return;
3190 }
3191
3192 let new_file = if let Some(entry) = old_file
3193 .entry_id
3194 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3195 {
3196 File {
3197 is_local: true,
3198 entry_id: Some(entry.id),
3199 mtime: entry.mtime,
3200 path: entry.path.clone(),
3201 worktree: worktree_handle.clone(),
3202 }
3203 } else if let Some(entry) =
3204 snapshot.entry_for_path(old_file.path().as_ref())
3205 {
3206 File {
3207 is_local: true,
3208 entry_id: Some(entry.id),
3209 mtime: entry.mtime,
3210 path: entry.path.clone(),
3211 worktree: worktree_handle.clone(),
3212 }
3213 } else {
3214 File {
3215 is_local: true,
3216 entry_id: None,
3217 path: old_file.path().clone(),
3218 mtime: old_file.mtime(),
3219 worktree: worktree_handle.clone(),
3220 }
3221 };
3222
3223 if let Some(project_id) = self.remote_id() {
3224 self.client
3225 .send(proto::UpdateBufferFile {
3226 project_id,
3227 buffer_id: *buffer_id as u64,
3228 file: Some(new_file.to_proto()),
3229 })
3230 .log_err();
3231 }
3232 buffer.file_updated(Box::new(new_file), cx).detach();
3233 }
3234 });
3235 } else {
3236 buffers_to_delete.push(*buffer_id);
3237 }
3238 }
3239
3240 for buffer_id in buffers_to_delete {
3241 self.opened_buffers.remove(&buffer_id);
3242 }
3243 }
3244
3245 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3246 let new_active_entry = entry.and_then(|project_path| {
3247 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3248 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3249 Some(entry.id)
3250 });
3251 if new_active_entry != self.active_entry {
3252 self.active_entry = new_active_entry;
3253 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3254 }
3255 }
3256
3257 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3258 self.language_servers_with_diagnostics_running > 0
3259 }
3260
3261 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3262 let mut summary = DiagnosticSummary::default();
3263 for (_, path_summary) in self.diagnostic_summaries(cx) {
3264 summary.error_count += path_summary.error_count;
3265 summary.warning_count += path_summary.warning_count;
3266 summary.info_count += path_summary.info_count;
3267 summary.hint_count += path_summary.hint_count;
3268 }
3269 summary
3270 }
3271
3272 pub fn diagnostic_summaries<'a>(
3273 &'a self,
3274 cx: &'a AppContext,
3275 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3276 self.worktrees(cx).flat_map(move |worktree| {
3277 let worktree = worktree.read(cx);
3278 let worktree_id = worktree.id();
3279 worktree
3280 .diagnostic_summaries()
3281 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3282 })
3283 }
3284
3285 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3286 self.language_servers_with_diagnostics_running += 1;
3287 if self.language_servers_with_diagnostics_running == 1 {
3288 cx.emit(Event::DiskBasedDiagnosticsStarted);
3289 }
3290 }
3291
3292 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3293 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3294 self.language_servers_with_diagnostics_running -= 1;
3295 if self.language_servers_with_diagnostics_running == 0 {
3296 cx.emit(Event::DiskBasedDiagnosticsFinished);
3297 }
3298 }
3299
3300 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3301 self.active_entry
3302 }
3303
3304 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3305 self.worktree_for_id(path.worktree_id, cx)?
3306 .read(cx)
3307 .entry_for_path(&path.path)
3308 .map(|entry| entry.id)
3309 }
3310
3311 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3312 let worktree = self.worktree_for_entry(entry_id, cx)?;
3313 let worktree = worktree.read(cx);
3314 let worktree_id = worktree.id();
3315 let path = worktree.entry_for_id(entry_id)?.path.clone();
3316 Some(ProjectPath { worktree_id, path })
3317 }
3318
3319 // RPC message handlers
3320
3321 async fn handle_unshare_project(
3322 this: ModelHandle<Self>,
3323 _: TypedEnvelope<proto::UnshareProject>,
3324 _: Arc<Client>,
3325 mut cx: AsyncAppContext,
3326 ) -> Result<()> {
3327 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3328 Ok(())
3329 }
3330
3331 async fn handle_add_collaborator(
3332 this: ModelHandle<Self>,
3333 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3334 _: Arc<Client>,
3335 mut cx: AsyncAppContext,
3336 ) -> Result<()> {
3337 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3338 let collaborator = envelope
3339 .payload
3340 .collaborator
3341 .take()
3342 .ok_or_else(|| anyhow!("empty collaborator"))?;
3343
3344 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3345 this.update(&mut cx, |this, cx| {
3346 this.collaborators
3347 .insert(collaborator.peer_id, collaborator);
3348 cx.notify();
3349 });
3350
3351 Ok(())
3352 }
3353
3354 async fn handle_remove_collaborator(
3355 this: ModelHandle<Self>,
3356 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3357 _: Arc<Client>,
3358 mut cx: AsyncAppContext,
3359 ) -> Result<()> {
3360 this.update(&mut cx, |this, cx| {
3361 let peer_id = PeerId(envelope.payload.peer_id);
3362 let replica_id = this
3363 .collaborators
3364 .remove(&peer_id)
3365 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3366 .replica_id;
3367 for (_, buffer) in &this.opened_buffers {
3368 if let Some(buffer) = buffer.upgrade(cx) {
3369 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3370 }
3371 }
3372 cx.emit(Event::CollaboratorLeft(peer_id));
3373 cx.notify();
3374 Ok(())
3375 })
3376 }
3377
3378 async fn handle_register_worktree(
3379 this: ModelHandle<Self>,
3380 envelope: TypedEnvelope<proto::RegisterWorktree>,
3381 client: Arc<Client>,
3382 mut cx: AsyncAppContext,
3383 ) -> Result<()> {
3384 this.update(&mut cx, |this, cx| {
3385 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3386 let replica_id = this.replica_id();
3387 let worktree = proto::Worktree {
3388 id: envelope.payload.worktree_id,
3389 root_name: envelope.payload.root_name,
3390 entries: Default::default(),
3391 diagnostic_summaries: Default::default(),
3392 visible: envelope.payload.visible,
3393 };
3394 let (worktree, load_task) =
3395 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3396 this.add_worktree(&worktree, cx);
3397 load_task.detach();
3398 Ok(())
3399 })
3400 }
3401
3402 async fn handle_unregister_worktree(
3403 this: ModelHandle<Self>,
3404 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3405 _: Arc<Client>,
3406 mut cx: AsyncAppContext,
3407 ) -> Result<()> {
3408 this.update(&mut cx, |this, cx| {
3409 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3410 this.remove_worktree(worktree_id, cx);
3411 Ok(())
3412 })
3413 }
3414
3415 async fn handle_update_worktree(
3416 this: ModelHandle<Self>,
3417 envelope: TypedEnvelope<proto::UpdateWorktree>,
3418 _: Arc<Client>,
3419 mut cx: AsyncAppContext,
3420 ) -> Result<()> {
3421 this.update(&mut cx, |this, cx| {
3422 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3423 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3424 worktree.update(cx, |worktree, _| {
3425 let worktree = worktree.as_remote_mut().unwrap();
3426 worktree.update_from_remote(envelope)
3427 })?;
3428 }
3429 Ok(())
3430 })
3431 }
3432
3433 async fn handle_update_diagnostic_summary(
3434 this: ModelHandle<Self>,
3435 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3436 _: Arc<Client>,
3437 mut cx: AsyncAppContext,
3438 ) -> Result<()> {
3439 this.update(&mut cx, |this, cx| {
3440 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3441 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3442 if let Some(summary) = envelope.payload.summary {
3443 let project_path = ProjectPath {
3444 worktree_id,
3445 path: Path::new(&summary.path).into(),
3446 };
3447 worktree.update(cx, |worktree, _| {
3448 worktree
3449 .as_remote_mut()
3450 .unwrap()
3451 .update_diagnostic_summary(project_path.path.clone(), &summary);
3452 });
3453 cx.emit(Event::DiagnosticsUpdated(project_path));
3454 }
3455 }
3456 Ok(())
3457 })
3458 }
3459
3460 async fn handle_start_language_server(
3461 this: ModelHandle<Self>,
3462 envelope: TypedEnvelope<proto::StartLanguageServer>,
3463 _: Arc<Client>,
3464 mut cx: AsyncAppContext,
3465 ) -> Result<()> {
3466 let server = envelope
3467 .payload
3468 .server
3469 .ok_or_else(|| anyhow!("invalid server"))?;
3470 this.update(&mut cx, |this, cx| {
3471 this.language_server_statuses.insert(
3472 server.id as usize,
3473 LanguageServerStatus {
3474 name: server.name,
3475 pending_work: Default::default(),
3476 pending_diagnostic_updates: 0,
3477 },
3478 );
3479 cx.notify();
3480 });
3481 Ok(())
3482 }
3483
3484 async fn handle_update_language_server(
3485 this: ModelHandle<Self>,
3486 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3487 _: Arc<Client>,
3488 mut cx: AsyncAppContext,
3489 ) -> Result<()> {
3490 let language_server_id = envelope.payload.language_server_id as usize;
3491 match envelope
3492 .payload
3493 .variant
3494 .ok_or_else(|| anyhow!("invalid variant"))?
3495 {
3496 proto::update_language_server::Variant::WorkStart(payload) => {
3497 this.update(&mut cx, |this, cx| {
3498 this.on_lsp_work_start(language_server_id, payload.token, cx);
3499 })
3500 }
3501 proto::update_language_server::Variant::WorkProgress(payload) => {
3502 this.update(&mut cx, |this, cx| {
3503 this.on_lsp_work_progress(
3504 language_server_id,
3505 payload.token,
3506 LanguageServerProgress {
3507 message: payload.message,
3508 percentage: payload.percentage.map(|p| p as usize),
3509 last_update_at: Instant::now(),
3510 },
3511 cx,
3512 );
3513 })
3514 }
3515 proto::update_language_server::Variant::WorkEnd(payload) => {
3516 this.update(&mut cx, |this, cx| {
3517 this.on_lsp_work_end(language_server_id, payload.token, cx);
3518 })
3519 }
3520 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3521 this.update(&mut cx, |this, cx| {
3522 this.disk_based_diagnostics_started(cx);
3523 })
3524 }
3525 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3526 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3527 }
3528 }
3529
3530 Ok(())
3531 }
3532
3533 async fn handle_update_buffer(
3534 this: ModelHandle<Self>,
3535 envelope: TypedEnvelope<proto::UpdateBuffer>,
3536 _: Arc<Client>,
3537 mut cx: AsyncAppContext,
3538 ) -> Result<()> {
3539 this.update(&mut cx, |this, cx| {
3540 let payload = envelope.payload.clone();
3541 let buffer_id = payload.buffer_id;
3542 let ops = payload
3543 .operations
3544 .into_iter()
3545 .map(|op| language::proto::deserialize_operation(op))
3546 .collect::<Result<Vec<_>, _>>()?;
3547 match this.opened_buffers.entry(buffer_id) {
3548 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3549 OpenBuffer::Strong(buffer) => {
3550 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3551 }
3552 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3553 OpenBuffer::Weak(_) => {}
3554 },
3555 hash_map::Entry::Vacant(e) => {
3556 e.insert(OpenBuffer::Loading(ops));
3557 }
3558 }
3559 Ok(())
3560 })
3561 }
3562
3563 async fn handle_update_buffer_file(
3564 this: ModelHandle<Self>,
3565 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3566 _: Arc<Client>,
3567 mut cx: AsyncAppContext,
3568 ) -> Result<()> {
3569 this.update(&mut cx, |this, cx| {
3570 let payload = envelope.payload.clone();
3571 let buffer_id = payload.buffer_id;
3572 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3573 let worktree = this
3574 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3575 .ok_or_else(|| anyhow!("no such worktree"))?;
3576 let file = File::from_proto(file, worktree.clone(), cx)?;
3577 let buffer = this
3578 .opened_buffers
3579 .get_mut(&buffer_id)
3580 .and_then(|b| b.upgrade(cx))
3581 .ok_or_else(|| anyhow!("no such buffer"))?;
3582 buffer.update(cx, |buffer, cx| {
3583 buffer.file_updated(Box::new(file), cx).detach();
3584 });
3585 Ok(())
3586 })
3587 }
3588
3589 async fn handle_save_buffer(
3590 this: ModelHandle<Self>,
3591 envelope: TypedEnvelope<proto::SaveBuffer>,
3592 _: Arc<Client>,
3593 mut cx: AsyncAppContext,
3594 ) -> Result<proto::BufferSaved> {
3595 let buffer_id = envelope.payload.buffer_id;
3596 let requested_version = deserialize_version(envelope.payload.version);
3597
3598 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3599 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3600 let buffer = this
3601 .opened_buffers
3602 .get(&buffer_id)
3603 .map(|buffer| buffer.upgrade(cx).unwrap())
3604 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3605 Ok::<_, anyhow::Error>((project_id, buffer))
3606 })?;
3607 buffer
3608 .update(&mut cx, |buffer, _| {
3609 buffer.wait_for_version(requested_version)
3610 })
3611 .await;
3612
3613 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3614 Ok(proto::BufferSaved {
3615 project_id,
3616 buffer_id,
3617 version: serialize_version(&saved_version),
3618 mtime: Some(mtime.into()),
3619 })
3620 }
3621
3622 async fn handle_format_buffers(
3623 this: ModelHandle<Self>,
3624 envelope: TypedEnvelope<proto::FormatBuffers>,
3625 _: Arc<Client>,
3626 mut cx: AsyncAppContext,
3627 ) -> Result<proto::FormatBuffersResponse> {
3628 let sender_id = envelope.original_sender_id()?;
3629 let format = this.update(&mut cx, |this, cx| {
3630 let mut buffers = HashSet::default();
3631 for buffer_id in &envelope.payload.buffer_ids {
3632 buffers.insert(
3633 this.opened_buffers
3634 .get(buffer_id)
3635 .map(|buffer| buffer.upgrade(cx).unwrap())
3636 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3637 );
3638 }
3639 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3640 })?;
3641
3642 let project_transaction = format.await?;
3643 let project_transaction = this.update(&mut cx, |this, cx| {
3644 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3645 });
3646 Ok(proto::FormatBuffersResponse {
3647 transaction: Some(project_transaction),
3648 })
3649 }
3650
3651 async fn handle_get_completions(
3652 this: ModelHandle<Self>,
3653 envelope: TypedEnvelope<proto::GetCompletions>,
3654 _: Arc<Client>,
3655 mut cx: AsyncAppContext,
3656 ) -> Result<proto::GetCompletionsResponse> {
3657 let position = envelope
3658 .payload
3659 .position
3660 .and_then(language::proto::deserialize_anchor)
3661 .ok_or_else(|| anyhow!("invalid position"))?;
3662 let version = deserialize_version(envelope.payload.version);
3663 let buffer = this.read_with(&cx, |this, cx| {
3664 this.opened_buffers
3665 .get(&envelope.payload.buffer_id)
3666 .map(|buffer| buffer.upgrade(cx).unwrap())
3667 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3668 })?;
3669 buffer
3670 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3671 .await;
3672 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3673 let completions = this
3674 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3675 .await?;
3676
3677 Ok(proto::GetCompletionsResponse {
3678 completions: completions
3679 .iter()
3680 .map(language::proto::serialize_completion)
3681 .collect(),
3682 version: serialize_version(&version),
3683 })
3684 }
3685
3686 async fn handle_apply_additional_edits_for_completion(
3687 this: ModelHandle<Self>,
3688 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3689 _: Arc<Client>,
3690 mut cx: AsyncAppContext,
3691 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3692 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3693 let buffer = this
3694 .opened_buffers
3695 .get(&envelope.payload.buffer_id)
3696 .map(|buffer| buffer.upgrade(cx).unwrap())
3697 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3698 let language = buffer.read(cx).language();
3699 let completion = language::proto::deserialize_completion(
3700 envelope
3701 .payload
3702 .completion
3703 .ok_or_else(|| anyhow!("invalid completion"))?,
3704 language,
3705 )?;
3706 Ok::<_, anyhow::Error>(
3707 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3708 )
3709 })?;
3710
3711 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3712 transaction: apply_additional_edits
3713 .await?
3714 .as_ref()
3715 .map(language::proto::serialize_transaction),
3716 })
3717 }
3718
3719 async fn handle_get_code_actions(
3720 this: ModelHandle<Self>,
3721 envelope: TypedEnvelope<proto::GetCodeActions>,
3722 _: Arc<Client>,
3723 mut cx: AsyncAppContext,
3724 ) -> Result<proto::GetCodeActionsResponse> {
3725 let start = envelope
3726 .payload
3727 .start
3728 .and_then(language::proto::deserialize_anchor)
3729 .ok_or_else(|| anyhow!("invalid start"))?;
3730 let end = envelope
3731 .payload
3732 .end
3733 .and_then(language::proto::deserialize_anchor)
3734 .ok_or_else(|| anyhow!("invalid end"))?;
3735 let buffer = this.update(&mut cx, |this, cx| {
3736 this.opened_buffers
3737 .get(&envelope.payload.buffer_id)
3738 .map(|buffer| buffer.upgrade(cx).unwrap())
3739 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3740 })?;
3741 buffer
3742 .update(&mut cx, |buffer, _| {
3743 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3744 })
3745 .await;
3746
3747 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3748 let code_actions = this.update(&mut cx, |this, cx| {
3749 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3750 })?;
3751
3752 Ok(proto::GetCodeActionsResponse {
3753 actions: code_actions
3754 .await?
3755 .iter()
3756 .map(language::proto::serialize_code_action)
3757 .collect(),
3758 version: serialize_version(&version),
3759 })
3760 }
3761
3762 async fn handle_apply_code_action(
3763 this: ModelHandle<Self>,
3764 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3765 _: Arc<Client>,
3766 mut cx: AsyncAppContext,
3767 ) -> Result<proto::ApplyCodeActionResponse> {
3768 let sender_id = envelope.original_sender_id()?;
3769 let action = language::proto::deserialize_code_action(
3770 envelope
3771 .payload
3772 .action
3773 .ok_or_else(|| anyhow!("invalid action"))?,
3774 )?;
3775 let apply_code_action = this.update(&mut cx, |this, cx| {
3776 let buffer = this
3777 .opened_buffers
3778 .get(&envelope.payload.buffer_id)
3779 .map(|buffer| buffer.upgrade(cx).unwrap())
3780 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3781 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3782 })?;
3783
3784 let project_transaction = apply_code_action.await?;
3785 let project_transaction = this.update(&mut cx, |this, cx| {
3786 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3787 });
3788 Ok(proto::ApplyCodeActionResponse {
3789 transaction: Some(project_transaction),
3790 })
3791 }
3792
3793 async fn handle_lsp_command<T: LspCommand>(
3794 this: ModelHandle<Self>,
3795 envelope: TypedEnvelope<T::ProtoRequest>,
3796 _: Arc<Client>,
3797 mut cx: AsyncAppContext,
3798 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3799 where
3800 <T::LspRequest as lsp::request::Request>::Result: Send,
3801 {
3802 let sender_id = envelope.original_sender_id()?;
3803 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3804 let buffer_handle = this.read_with(&cx, |this, _| {
3805 this.opened_buffers
3806 .get(&buffer_id)
3807 .and_then(|buffer| buffer.upgrade(&cx))
3808 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3809 })?;
3810 let request = T::from_proto(
3811 envelope.payload,
3812 this.clone(),
3813 buffer_handle.clone(),
3814 cx.clone(),
3815 )
3816 .await?;
3817 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3818 let response = this
3819 .update(&mut cx, |this, cx| {
3820 this.request_lsp(buffer_handle, request, cx)
3821 })
3822 .await?;
3823 this.update(&mut cx, |this, cx| {
3824 Ok(T::response_to_proto(
3825 response,
3826 this,
3827 sender_id,
3828 &buffer_version,
3829 cx,
3830 ))
3831 })
3832 }
3833
3834 async fn handle_get_project_symbols(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::GetProjectSymbolsResponse> {
3840 let symbols = this
3841 .update(&mut cx, |this, cx| {
3842 this.symbols(&envelope.payload.query, cx)
3843 })
3844 .await?;
3845
3846 Ok(proto::GetProjectSymbolsResponse {
3847 symbols: symbols.iter().map(serialize_symbol).collect(),
3848 })
3849 }
3850
3851 async fn handle_search_project(
3852 this: ModelHandle<Self>,
3853 envelope: TypedEnvelope<proto::SearchProject>,
3854 _: Arc<Client>,
3855 mut cx: AsyncAppContext,
3856 ) -> Result<proto::SearchProjectResponse> {
3857 let peer_id = envelope.original_sender_id()?;
3858 let query = SearchQuery::from_proto(envelope.payload)?;
3859 let result = this
3860 .update(&mut cx, |this, cx| this.search(query, cx))
3861 .await?;
3862
3863 this.update(&mut cx, |this, cx| {
3864 let mut locations = Vec::new();
3865 for (buffer, ranges) in result {
3866 for range in ranges {
3867 let start = serialize_anchor(&range.start);
3868 let end = serialize_anchor(&range.end);
3869 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3870 locations.push(proto::Location {
3871 buffer: Some(buffer),
3872 start: Some(start),
3873 end: Some(end),
3874 });
3875 }
3876 }
3877 Ok(proto::SearchProjectResponse { locations })
3878 })
3879 }
3880
3881 async fn handle_open_buffer_for_symbol(
3882 this: ModelHandle<Self>,
3883 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3884 _: Arc<Client>,
3885 mut cx: AsyncAppContext,
3886 ) -> Result<proto::OpenBufferForSymbolResponse> {
3887 let peer_id = envelope.original_sender_id()?;
3888 let symbol = envelope
3889 .payload
3890 .symbol
3891 .ok_or_else(|| anyhow!("invalid symbol"))?;
3892 let symbol = this.read_with(&cx, |this, _| {
3893 let symbol = this.deserialize_symbol(symbol)?;
3894 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3895 if signature == symbol.signature {
3896 Ok(symbol)
3897 } else {
3898 Err(anyhow!("invalid symbol signature"))
3899 }
3900 })?;
3901 let buffer = this
3902 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3903 .await?;
3904
3905 Ok(proto::OpenBufferForSymbolResponse {
3906 buffer: Some(this.update(&mut cx, |this, cx| {
3907 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3908 })),
3909 })
3910 }
3911
3912 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3913 let mut hasher = Sha256::new();
3914 hasher.update(worktree_id.to_proto().to_be_bytes());
3915 hasher.update(path.to_string_lossy().as_bytes());
3916 hasher.update(self.nonce.to_be_bytes());
3917 hasher.finalize().as_slice().try_into().unwrap()
3918 }
3919
3920 async fn handle_open_buffer_by_id(
3921 this: ModelHandle<Self>,
3922 envelope: TypedEnvelope<proto::OpenBufferById>,
3923 _: Arc<Client>,
3924 mut cx: AsyncAppContext,
3925 ) -> Result<proto::OpenBufferResponse> {
3926 let peer_id = envelope.original_sender_id()?;
3927 let buffer = this
3928 .update(&mut cx, |this, cx| {
3929 this.open_buffer_by_id(envelope.payload.id, cx)
3930 })
3931 .await?;
3932 this.update(&mut cx, |this, cx| {
3933 Ok(proto::OpenBufferResponse {
3934 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3935 })
3936 })
3937 }
3938
3939 async fn handle_open_buffer_by_path(
3940 this: ModelHandle<Self>,
3941 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3942 _: Arc<Client>,
3943 mut cx: AsyncAppContext,
3944 ) -> Result<proto::OpenBufferResponse> {
3945 let peer_id = envelope.original_sender_id()?;
3946 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3947 let open_buffer = this.update(&mut cx, |this, cx| {
3948 this.open_buffer(
3949 ProjectPath {
3950 worktree_id,
3951 path: PathBuf::from(envelope.payload.path).into(),
3952 },
3953 cx,
3954 )
3955 });
3956
3957 let buffer = open_buffer.await?;
3958 this.update(&mut cx, |this, cx| {
3959 Ok(proto::OpenBufferResponse {
3960 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3961 })
3962 })
3963 }
3964
3965 fn serialize_project_transaction_for_peer(
3966 &mut self,
3967 project_transaction: ProjectTransaction,
3968 peer_id: PeerId,
3969 cx: &AppContext,
3970 ) -> proto::ProjectTransaction {
3971 let mut serialized_transaction = proto::ProjectTransaction {
3972 buffers: Default::default(),
3973 transactions: Default::default(),
3974 };
3975 for (buffer, transaction) in project_transaction.0 {
3976 serialized_transaction
3977 .buffers
3978 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3979 serialized_transaction
3980 .transactions
3981 .push(language::proto::serialize_transaction(&transaction));
3982 }
3983 serialized_transaction
3984 }
3985
3986 fn deserialize_project_transaction(
3987 &mut self,
3988 message: proto::ProjectTransaction,
3989 push_to_history: bool,
3990 cx: &mut ModelContext<Self>,
3991 ) -> Task<Result<ProjectTransaction>> {
3992 cx.spawn(|this, mut cx| async move {
3993 let mut project_transaction = ProjectTransaction::default();
3994 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3995 let buffer = this
3996 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3997 .await?;
3998 let transaction = language::proto::deserialize_transaction(transaction)?;
3999 project_transaction.0.insert(buffer, transaction);
4000 }
4001
4002 for (buffer, transaction) in &project_transaction.0 {
4003 buffer
4004 .update(&mut cx, |buffer, _| {
4005 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4006 })
4007 .await;
4008
4009 if push_to_history {
4010 buffer.update(&mut cx, |buffer, _| {
4011 buffer.push_transaction(transaction.clone(), Instant::now());
4012 });
4013 }
4014 }
4015
4016 Ok(project_transaction)
4017 })
4018 }
4019
4020 fn serialize_buffer_for_peer(
4021 &mut self,
4022 buffer: &ModelHandle<Buffer>,
4023 peer_id: PeerId,
4024 cx: &AppContext,
4025 ) -> proto::Buffer {
4026 let buffer_id = buffer.read(cx).remote_id();
4027 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4028 if shared_buffers.insert(buffer_id) {
4029 proto::Buffer {
4030 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4031 }
4032 } else {
4033 proto::Buffer {
4034 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4035 }
4036 }
4037 }
4038
4039 fn deserialize_buffer(
4040 &mut self,
4041 buffer: proto::Buffer,
4042 cx: &mut ModelContext<Self>,
4043 ) -> Task<Result<ModelHandle<Buffer>>> {
4044 let replica_id = self.replica_id();
4045
4046 let opened_buffer_tx = self.opened_buffer.0.clone();
4047 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4048 cx.spawn(|this, mut cx| async move {
4049 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4050 proto::buffer::Variant::Id(id) => {
4051 let buffer = loop {
4052 let buffer = this.read_with(&cx, |this, cx| {
4053 this.opened_buffers
4054 .get(&id)
4055 .and_then(|buffer| buffer.upgrade(cx))
4056 });
4057 if let Some(buffer) = buffer {
4058 break buffer;
4059 }
4060 opened_buffer_rx
4061 .next()
4062 .await
4063 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4064 };
4065 Ok(buffer)
4066 }
4067 proto::buffer::Variant::State(mut buffer) => {
4068 let mut buffer_worktree = None;
4069 let mut buffer_file = None;
4070 if let Some(file) = buffer.file.take() {
4071 this.read_with(&cx, |this, cx| {
4072 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4073 let worktree =
4074 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4075 anyhow!("no worktree found for id {}", file.worktree_id)
4076 })?;
4077 buffer_file =
4078 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4079 as Box<dyn language::File>);
4080 buffer_worktree = Some(worktree);
4081 Ok::<_, anyhow::Error>(())
4082 })?;
4083 }
4084
4085 let buffer = cx.add_model(|cx| {
4086 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4087 });
4088
4089 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4090
4091 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4092 Ok(buffer)
4093 }
4094 }
4095 })
4096 }
4097
4098 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4099 let language = self
4100 .languages
4101 .get_language(&serialized_symbol.language_name);
4102 let start = serialized_symbol
4103 .start
4104 .ok_or_else(|| anyhow!("invalid start"))?;
4105 let end = serialized_symbol
4106 .end
4107 .ok_or_else(|| anyhow!("invalid end"))?;
4108 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4109 Ok(Symbol {
4110 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4111 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4112 language_name: serialized_symbol.language_name.clone(),
4113 label: language
4114 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4115 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4116 name: serialized_symbol.name,
4117 path: PathBuf::from(serialized_symbol.path),
4118 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4119 kind,
4120 signature: serialized_symbol
4121 .signature
4122 .try_into()
4123 .map_err(|_| anyhow!("invalid signature"))?,
4124 })
4125 }
4126
4127 async fn handle_buffer_saved(
4128 this: ModelHandle<Self>,
4129 envelope: TypedEnvelope<proto::BufferSaved>,
4130 _: Arc<Client>,
4131 mut cx: AsyncAppContext,
4132 ) -> Result<()> {
4133 let version = deserialize_version(envelope.payload.version);
4134 let mtime = envelope
4135 .payload
4136 .mtime
4137 .ok_or_else(|| anyhow!("missing mtime"))?
4138 .into();
4139
4140 this.update(&mut cx, |this, cx| {
4141 let buffer = this
4142 .opened_buffers
4143 .get(&envelope.payload.buffer_id)
4144 .and_then(|buffer| buffer.upgrade(cx));
4145 if let Some(buffer) = buffer {
4146 buffer.update(cx, |buffer, cx| {
4147 buffer.did_save(version, mtime, None, cx);
4148 });
4149 }
4150 Ok(())
4151 })
4152 }
4153
4154 async fn handle_buffer_reloaded(
4155 this: ModelHandle<Self>,
4156 envelope: TypedEnvelope<proto::BufferReloaded>,
4157 _: Arc<Client>,
4158 mut cx: AsyncAppContext,
4159 ) -> Result<()> {
4160 let payload = envelope.payload.clone();
4161 let version = deserialize_version(payload.version);
4162 let mtime = payload
4163 .mtime
4164 .ok_or_else(|| anyhow!("missing mtime"))?
4165 .into();
4166 this.update(&mut cx, |this, cx| {
4167 let buffer = this
4168 .opened_buffers
4169 .get(&payload.buffer_id)
4170 .and_then(|buffer| buffer.upgrade(cx));
4171 if let Some(buffer) = buffer {
4172 buffer.update(cx, |buffer, cx| {
4173 buffer.did_reload(version, mtime, cx);
4174 });
4175 }
4176 Ok(())
4177 })
4178 }
4179
4180 pub fn match_paths<'a>(
4181 &self,
4182 query: &'a str,
4183 include_ignored: bool,
4184 smart_case: bool,
4185 max_results: usize,
4186 cancel_flag: &'a AtomicBool,
4187 cx: &AppContext,
4188 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4189 let worktrees = self
4190 .worktrees(cx)
4191 .filter(|worktree| worktree.read(cx).is_visible())
4192 .collect::<Vec<_>>();
4193 let include_root_name = worktrees.len() > 1;
4194 let candidate_sets = worktrees
4195 .into_iter()
4196 .map(|worktree| CandidateSet {
4197 snapshot: worktree.read(cx).snapshot(),
4198 include_ignored,
4199 include_root_name,
4200 })
4201 .collect::<Vec<_>>();
4202
4203 let background = cx.background().clone();
4204 async move {
4205 fuzzy::match_paths(
4206 candidate_sets.as_slice(),
4207 query,
4208 smart_case,
4209 max_results,
4210 cancel_flag,
4211 background,
4212 )
4213 .await
4214 }
4215 }
4216
4217 fn edits_from_lsp(
4218 &mut self,
4219 buffer: &ModelHandle<Buffer>,
4220 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4221 version: Option<i32>,
4222 cx: &mut ModelContext<Self>,
4223 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4224 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4225 cx.background().spawn(async move {
4226 let snapshot = snapshot?;
4227 let mut lsp_edits = lsp_edits
4228 .into_iter()
4229 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4230 .peekable();
4231
4232 let mut edits = Vec::new();
4233 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4234 // Combine any LSP edits that are adjacent.
4235 //
4236 // Also, combine LSP edits that are separated from each other by only
4237 // a newline. This is important because for some code actions,
4238 // Rust-analyzer rewrites the entire buffer via a series of edits that
4239 // are separated by unchanged newline characters.
4240 //
4241 // In order for the diffing logic below to work properly, any edits that
4242 // cancel each other out must be combined into one.
4243 while let Some((next_range, next_text)) = lsp_edits.peek() {
4244 if next_range.start > range.end {
4245 if next_range.start.row > range.end.row + 1
4246 || next_range.start.column > 0
4247 || snapshot.clip_point_utf16(
4248 PointUtf16::new(range.end.row, u32::MAX),
4249 Bias::Left,
4250 ) > range.end
4251 {
4252 break;
4253 }
4254 new_text.push('\n');
4255 }
4256 range.end = next_range.end;
4257 new_text.push_str(&next_text);
4258 lsp_edits.next();
4259 }
4260
4261 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4262 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4263 {
4264 return Err(anyhow!("invalid edits received from language server"));
4265 }
4266
4267 // For multiline edits, perform a diff of the old and new text so that
4268 // we can identify the changes more precisely, preserving the locations
4269 // of any anchors positioned in the unchanged regions.
4270 if range.end.row > range.start.row {
4271 let mut offset = range.start.to_offset(&snapshot);
4272 let old_text = snapshot.text_for_range(range).collect::<String>();
4273
4274 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4275 let mut moved_since_edit = true;
4276 for change in diff.iter_all_changes() {
4277 let tag = change.tag();
4278 let value = change.value();
4279 match tag {
4280 ChangeTag::Equal => {
4281 offset += value.len();
4282 moved_since_edit = true;
4283 }
4284 ChangeTag::Delete => {
4285 let start = snapshot.anchor_after(offset);
4286 let end = snapshot.anchor_before(offset + value.len());
4287 if moved_since_edit {
4288 edits.push((start..end, String::new()));
4289 } else {
4290 edits.last_mut().unwrap().0.end = end;
4291 }
4292 offset += value.len();
4293 moved_since_edit = false;
4294 }
4295 ChangeTag::Insert => {
4296 if moved_since_edit {
4297 let anchor = snapshot.anchor_after(offset);
4298 edits.push((anchor.clone()..anchor, value.to_string()));
4299 } else {
4300 edits.last_mut().unwrap().1.push_str(value);
4301 }
4302 moved_since_edit = false;
4303 }
4304 }
4305 }
4306 } else if range.end == range.start {
4307 let anchor = snapshot.anchor_after(range.start);
4308 edits.push((anchor.clone()..anchor, new_text));
4309 } else {
4310 let edit_start = snapshot.anchor_after(range.start);
4311 let edit_end = snapshot.anchor_before(range.end);
4312 edits.push((edit_start..edit_end, new_text));
4313 }
4314 }
4315
4316 Ok(edits)
4317 })
4318 }
4319
4320 fn buffer_snapshot_for_lsp_version(
4321 &mut self,
4322 buffer: &ModelHandle<Buffer>,
4323 version: Option<i32>,
4324 cx: &AppContext,
4325 ) -> Result<TextBufferSnapshot> {
4326 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4327
4328 if let Some(version) = version {
4329 let buffer_id = buffer.read(cx).remote_id();
4330 let snapshots = self
4331 .buffer_snapshots
4332 .get_mut(&buffer_id)
4333 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4334 let mut found_snapshot = None;
4335 snapshots.retain(|(snapshot_version, snapshot)| {
4336 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4337 false
4338 } else {
4339 if *snapshot_version == version {
4340 found_snapshot = Some(snapshot.clone());
4341 }
4342 true
4343 }
4344 });
4345
4346 found_snapshot.ok_or_else(|| {
4347 anyhow!(
4348 "snapshot not found for buffer {} at version {}",
4349 buffer_id,
4350 version
4351 )
4352 })
4353 } else {
4354 Ok((buffer.read(cx)).text_snapshot())
4355 }
4356 }
4357
4358 fn language_server_for_buffer(
4359 &self,
4360 buffer: &Buffer,
4361 cx: &AppContext,
4362 ) -> Option<&Arc<LanguageServer>> {
4363 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4364 let worktree_id = file.worktree_id(cx);
4365 self.language_servers.get(&(worktree_id, language.name()))
4366 } else {
4367 None
4368 }
4369 }
4370}
4371
4372impl WorktreeHandle {
4373 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4374 match self {
4375 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4376 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4377 }
4378 }
4379}
4380
4381impl OpenBuffer {
4382 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4383 match self {
4384 OpenBuffer::Strong(handle) => Some(handle.clone()),
4385 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4386 OpenBuffer::Loading(_) => None,
4387 }
4388 }
4389}
4390
4391struct CandidateSet {
4392 snapshot: Snapshot,
4393 include_ignored: bool,
4394 include_root_name: bool,
4395}
4396
4397impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4398 type Candidates = CandidateSetIter<'a>;
4399
4400 fn id(&self) -> usize {
4401 self.snapshot.id().to_usize()
4402 }
4403
4404 fn len(&self) -> usize {
4405 if self.include_ignored {
4406 self.snapshot.file_count()
4407 } else {
4408 self.snapshot.visible_file_count()
4409 }
4410 }
4411
4412 fn prefix(&self) -> Arc<str> {
4413 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4414 self.snapshot.root_name().into()
4415 } else if self.include_root_name {
4416 format!("{}/", self.snapshot.root_name()).into()
4417 } else {
4418 "".into()
4419 }
4420 }
4421
4422 fn candidates(&'a self, start: usize) -> Self::Candidates {
4423 CandidateSetIter {
4424 traversal: self.snapshot.files(self.include_ignored, start),
4425 }
4426 }
4427}
4428
4429struct CandidateSetIter<'a> {
4430 traversal: Traversal<'a>,
4431}
4432
4433impl<'a> Iterator for CandidateSetIter<'a> {
4434 type Item = PathMatchCandidate<'a>;
4435
4436 fn next(&mut self) -> Option<Self::Item> {
4437 self.traversal.next().map(|entry| {
4438 if let EntryKind::File(char_bag) = entry.kind {
4439 PathMatchCandidate {
4440 path: &entry.path,
4441 char_bag,
4442 }
4443 } else {
4444 unreachable!()
4445 }
4446 })
4447 }
4448}
4449
4450impl Entity for Project {
4451 type Event = Event;
4452
4453 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4454 match &self.client_state {
4455 ProjectClientState::Local { remote_id_rx, .. } => {
4456 if let Some(project_id) = *remote_id_rx.borrow() {
4457 self.client
4458 .send(proto::UnregisterProject { project_id })
4459 .log_err();
4460 }
4461 }
4462 ProjectClientState::Remote { remote_id, .. } => {
4463 self.client
4464 .send(proto::LeaveProject {
4465 project_id: *remote_id,
4466 })
4467 .log_err();
4468 }
4469 }
4470 }
4471
4472 fn app_will_quit(
4473 &mut self,
4474 _: &mut MutableAppContext,
4475 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4476 let shutdown_futures = self
4477 .language_servers
4478 .drain()
4479 .filter_map(|(_, server)| server.shutdown())
4480 .collect::<Vec<_>>();
4481 Some(
4482 async move {
4483 futures::future::join_all(shutdown_futures).await;
4484 }
4485 .boxed(),
4486 )
4487 }
4488}
4489
4490impl Collaborator {
4491 fn from_proto(
4492 message: proto::Collaborator,
4493 user_store: &ModelHandle<UserStore>,
4494 cx: &mut AsyncAppContext,
4495 ) -> impl Future<Output = Result<Self>> {
4496 let user = user_store.update(cx, |user_store, cx| {
4497 user_store.fetch_user(message.user_id, cx)
4498 });
4499
4500 async move {
4501 Ok(Self {
4502 peer_id: PeerId(message.peer_id),
4503 user: user.await?,
4504 replica_id: message.replica_id as ReplicaId,
4505 })
4506 }
4507 }
4508}
4509
4510impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4511 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4512 Self {
4513 worktree_id,
4514 path: path.as_ref().into(),
4515 }
4516 }
4517}
4518
4519impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4520 fn from(options: lsp::CreateFileOptions) -> Self {
4521 Self {
4522 overwrite: options.overwrite.unwrap_or(false),
4523 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4524 }
4525 }
4526}
4527
4528impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4529 fn from(options: lsp::RenameFileOptions) -> Self {
4530 Self {
4531 overwrite: options.overwrite.unwrap_or(false),
4532 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4533 }
4534 }
4535}
4536
4537impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4538 fn from(options: lsp::DeleteFileOptions) -> Self {
4539 Self {
4540 recursive: options.recursive.unwrap_or(false),
4541 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4542 }
4543 }
4544}
4545
4546fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4547 proto::Symbol {
4548 source_worktree_id: symbol.source_worktree_id.to_proto(),
4549 worktree_id: symbol.worktree_id.to_proto(),
4550 language_name: symbol.language_name.clone(),
4551 name: symbol.name.clone(),
4552 kind: unsafe { mem::transmute(symbol.kind) },
4553 path: symbol.path.to_string_lossy().to_string(),
4554 start: Some(proto::Point {
4555 row: symbol.range.start.row,
4556 column: symbol.range.start.column,
4557 }),
4558 end: Some(proto::Point {
4559 row: symbol.range.end.row,
4560 column: symbol.range.end.column,
4561 }),
4562 signature: symbol.signature.to_vec(),
4563 }
4564}
4565
4566fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4567 let mut path_components = path.components();
4568 let mut base_components = base.components();
4569 let mut components: Vec<Component> = Vec::new();
4570 loop {
4571 match (path_components.next(), base_components.next()) {
4572 (None, None) => break,
4573 (Some(a), None) => {
4574 components.push(a);
4575 components.extend(path_components.by_ref());
4576 break;
4577 }
4578 (None, _) => components.push(Component::ParentDir),
4579 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4580 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4581 (Some(a), Some(_)) => {
4582 components.push(Component::ParentDir);
4583 for _ in base_components {
4584 components.push(Component::ParentDir);
4585 }
4586 components.push(a);
4587 components.extend(path_components.by_ref());
4588 break;
4589 }
4590 }
4591 }
4592 components.iter().map(|c| c.as_os_str()).collect()
4593}
4594
4595impl Item for Buffer {
4596 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4597 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4598 }
4599}
4600
4601#[cfg(test)]
4602mod tests {
4603 use super::{Event, *};
4604 use fs::RealFs;
4605 use futures::StreamExt;
4606 use gpui::test::subscribe;
4607 use language::{
4608 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4609 ToPoint,
4610 };
4611 use lsp::Url;
4612 use serde_json::json;
4613 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4614 use unindent::Unindent as _;
4615 use util::test::temp_tree;
4616 use worktree::WorktreeHandle as _;
4617
4618 #[gpui::test]
4619 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4620 let dir = temp_tree(json!({
4621 "root": {
4622 "apple": "",
4623 "banana": {
4624 "carrot": {
4625 "date": "",
4626 "endive": "",
4627 }
4628 },
4629 "fennel": {
4630 "grape": "",
4631 }
4632 }
4633 }));
4634
4635 let root_link_path = dir.path().join("root_link");
4636 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4637 unix::fs::symlink(
4638 &dir.path().join("root/fennel"),
4639 &dir.path().join("root/finnochio"),
4640 )
4641 .unwrap();
4642
4643 let project = Project::test(Arc::new(RealFs), cx);
4644
4645 let (tree, _) = project
4646 .update(cx, |project, cx| {
4647 project.find_or_create_local_worktree(&root_link_path, true, cx)
4648 })
4649 .await
4650 .unwrap();
4651
4652 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4653 .await;
4654 cx.read(|cx| {
4655 let tree = tree.read(cx);
4656 assert_eq!(tree.file_count(), 5);
4657 assert_eq!(
4658 tree.inode_for_path("fennel/grape"),
4659 tree.inode_for_path("finnochio/grape")
4660 );
4661 });
4662
4663 let cancel_flag = Default::default();
4664 let results = project
4665 .read_with(cx, |project, cx| {
4666 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4667 })
4668 .await;
4669 assert_eq!(
4670 results
4671 .into_iter()
4672 .map(|result| result.path)
4673 .collect::<Vec<Arc<Path>>>(),
4674 vec![
4675 PathBuf::from("banana/carrot/date").into(),
4676 PathBuf::from("banana/carrot/endive").into(),
4677 ]
4678 );
4679 }
4680
4681 #[gpui::test]
4682 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4683 cx.foreground().forbid_parking();
4684
4685 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4686 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4687 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4688 completion_provider: Some(lsp::CompletionOptions {
4689 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4690 ..Default::default()
4691 }),
4692 ..Default::default()
4693 });
4694 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4695 completion_provider: Some(lsp::CompletionOptions {
4696 trigger_characters: Some(vec![":".to_string()]),
4697 ..Default::default()
4698 }),
4699 ..Default::default()
4700 });
4701
4702 let rust_language = Arc::new(Language::new(
4703 LanguageConfig {
4704 name: "Rust".into(),
4705 path_suffixes: vec!["rs".to_string()],
4706 language_server: Some(rust_lsp_config),
4707 ..Default::default()
4708 },
4709 Some(tree_sitter_rust::language()),
4710 ));
4711 let json_language = Arc::new(Language::new(
4712 LanguageConfig {
4713 name: "JSON".into(),
4714 path_suffixes: vec!["json".to_string()],
4715 language_server: Some(json_lsp_config),
4716 ..Default::default()
4717 },
4718 None,
4719 ));
4720
4721 let fs = FakeFs::new(cx.background());
4722 fs.insert_tree(
4723 "/the-root",
4724 json!({
4725 "test.rs": "const A: i32 = 1;",
4726 "test2.rs": "",
4727 "Cargo.toml": "a = 1",
4728 "package.json": "{\"a\": 1}",
4729 }),
4730 )
4731 .await;
4732
4733 let project = Project::test(fs, cx);
4734 project.update(cx, |project, _| {
4735 project.languages.add(rust_language);
4736 project.languages.add(json_language);
4737 });
4738
4739 let worktree_id = project
4740 .update(cx, |project, cx| {
4741 project.find_or_create_local_worktree("/the-root", true, cx)
4742 })
4743 .await
4744 .unwrap()
4745 .0
4746 .read_with(cx, |tree, _| tree.id());
4747
4748 // Open a buffer without an associated language server.
4749 let toml_buffer = project
4750 .update(cx, |project, cx| {
4751 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4752 })
4753 .await
4754 .unwrap();
4755
4756 // Open a buffer with an associated language server.
4757 let rust_buffer = project
4758 .update(cx, |project, cx| {
4759 project.open_buffer((worktree_id, "test.rs"), cx)
4760 })
4761 .await
4762 .unwrap();
4763
4764 // A server is started up, and it is notified about Rust files.
4765 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4766 assert_eq!(
4767 fake_rust_server
4768 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4769 .await
4770 .text_document,
4771 lsp::TextDocumentItem {
4772 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4773 version: 0,
4774 text: "const A: i32 = 1;".to_string(),
4775 language_id: Default::default()
4776 }
4777 );
4778
4779 // The buffer is configured based on the language server's capabilities.
4780 rust_buffer.read_with(cx, |buffer, _| {
4781 assert_eq!(
4782 buffer.completion_triggers(),
4783 &[".".to_string(), "::".to_string()]
4784 );
4785 });
4786 toml_buffer.read_with(cx, |buffer, _| {
4787 assert!(buffer.completion_triggers().is_empty());
4788 });
4789
4790 // Edit a buffer. The changes are reported to the language server.
4791 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4792 assert_eq!(
4793 fake_rust_server
4794 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4795 .await
4796 .text_document,
4797 lsp::VersionedTextDocumentIdentifier::new(
4798 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4799 1
4800 )
4801 );
4802
4803 // Open a third buffer with a different associated language server.
4804 let json_buffer = project
4805 .update(cx, |project, cx| {
4806 project.open_buffer((worktree_id, "package.json"), cx)
4807 })
4808 .await
4809 .unwrap();
4810
4811 // Another language server is started up, and it is notified about
4812 // all three open buffers.
4813 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4814 assert_eq!(
4815 fake_json_server
4816 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4817 .await
4818 .text_document,
4819 lsp::TextDocumentItem {
4820 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4821 version: 0,
4822 text: "{\"a\": 1}".to_string(),
4823 language_id: Default::default()
4824 }
4825 );
4826
4827 // This buffer is configured based on the second language server's
4828 // capabilities.
4829 json_buffer.read_with(cx, |buffer, _| {
4830 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4831 });
4832
4833 // When opening another buffer whose language server is already running,
4834 // it is also configured based on the existing language server's capabilities.
4835 let rust_buffer2 = project
4836 .update(cx, |project, cx| {
4837 project.open_buffer((worktree_id, "test2.rs"), cx)
4838 })
4839 .await
4840 .unwrap();
4841 rust_buffer2.read_with(cx, |buffer, _| {
4842 assert_eq!(
4843 buffer.completion_triggers(),
4844 &[".".to_string(), "::".to_string()]
4845 );
4846 });
4847
4848 // Changes are reported only to servers matching the buffer's language.
4849 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4850 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4851 assert_eq!(
4852 fake_rust_server
4853 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4854 .await
4855 .text_document,
4856 lsp::VersionedTextDocumentIdentifier::new(
4857 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4858 1
4859 )
4860 );
4861
4862 // Save notifications are reported to all servers.
4863 toml_buffer
4864 .update(cx, |buffer, cx| buffer.save(cx))
4865 .await
4866 .unwrap();
4867 assert_eq!(
4868 fake_rust_server
4869 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4870 .await
4871 .text_document,
4872 lsp::TextDocumentIdentifier::new(
4873 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4874 )
4875 );
4876 assert_eq!(
4877 fake_json_server
4878 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4879 .await
4880 .text_document,
4881 lsp::TextDocumentIdentifier::new(
4882 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4883 )
4884 );
4885
4886 // Close notifications are reported only to servers matching the buffer's language.
4887 cx.update(|_| drop(json_buffer));
4888 let close_message = lsp::DidCloseTextDocumentParams {
4889 text_document: lsp::TextDocumentIdentifier::new(
4890 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4891 ),
4892 };
4893 assert_eq!(
4894 fake_json_server
4895 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4896 .await,
4897 close_message,
4898 );
4899 }
4900
4901 #[gpui::test]
4902 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4903 cx.foreground().forbid_parking();
4904
4905 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4906 let progress_token = language_server_config
4907 .disk_based_diagnostics_progress_token
4908 .clone()
4909 .unwrap();
4910
4911 let language = Arc::new(Language::new(
4912 LanguageConfig {
4913 name: "Rust".into(),
4914 path_suffixes: vec!["rs".to_string()],
4915 language_server: Some(language_server_config),
4916 ..Default::default()
4917 },
4918 Some(tree_sitter_rust::language()),
4919 ));
4920
4921 let fs = FakeFs::new(cx.background());
4922 fs.insert_tree(
4923 "/dir",
4924 json!({
4925 "a.rs": "fn a() { A }",
4926 "b.rs": "const y: i32 = 1",
4927 }),
4928 )
4929 .await;
4930
4931 let project = Project::test(fs, cx);
4932 project.update(cx, |project, _| project.languages.add(language));
4933
4934 let (tree, _) = project
4935 .update(cx, |project, cx| {
4936 project.find_or_create_local_worktree("/dir", true, cx)
4937 })
4938 .await
4939 .unwrap();
4940 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4941
4942 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4943 .await;
4944
4945 // Cause worktree to start the fake language server
4946 let _buffer = project
4947 .update(cx, |project, cx| {
4948 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4949 })
4950 .await
4951 .unwrap();
4952
4953 let mut events = subscribe(&project, cx);
4954
4955 let mut fake_server = fake_servers.next().await.unwrap();
4956 fake_server.start_progress(&progress_token).await;
4957 assert_eq!(
4958 events.next().await.unwrap(),
4959 Event::DiskBasedDiagnosticsStarted
4960 );
4961
4962 fake_server.start_progress(&progress_token).await;
4963 fake_server.end_progress(&progress_token).await;
4964 fake_server.start_progress(&progress_token).await;
4965
4966 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4967 lsp::PublishDiagnosticsParams {
4968 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4969 version: None,
4970 diagnostics: vec![lsp::Diagnostic {
4971 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4972 severity: Some(lsp::DiagnosticSeverity::ERROR),
4973 message: "undefined variable 'A'".to_string(),
4974 ..Default::default()
4975 }],
4976 },
4977 );
4978 assert_eq!(
4979 events.next().await.unwrap(),
4980 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4981 );
4982
4983 fake_server.end_progress(&progress_token).await;
4984 fake_server.end_progress(&progress_token).await;
4985 assert_eq!(
4986 events.next().await.unwrap(),
4987 Event::DiskBasedDiagnosticsUpdated
4988 );
4989 assert_eq!(
4990 events.next().await.unwrap(),
4991 Event::DiskBasedDiagnosticsFinished
4992 );
4993
4994 let buffer = project
4995 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4996 .await
4997 .unwrap();
4998
4999 buffer.read_with(cx, |buffer, _| {
5000 let snapshot = buffer.snapshot();
5001 let diagnostics = snapshot
5002 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5003 .collect::<Vec<_>>();
5004 assert_eq!(
5005 diagnostics,
5006 &[DiagnosticEntry {
5007 range: Point::new(0, 9)..Point::new(0, 10),
5008 diagnostic: Diagnostic {
5009 severity: lsp::DiagnosticSeverity::ERROR,
5010 message: "undefined variable 'A'".to_string(),
5011 group_id: 0,
5012 is_primary: true,
5013 ..Default::default()
5014 }
5015 }]
5016 )
5017 });
5018 }
5019
5020 #[gpui::test]
5021 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
5022 cx.foreground().forbid_parking();
5023
5024 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5025 lsp_config
5026 .disk_based_diagnostic_sources
5027 .insert("disk".to_string());
5028 let language = Arc::new(Language::new(
5029 LanguageConfig {
5030 name: "Rust".into(),
5031 path_suffixes: vec!["rs".to_string()],
5032 language_server: Some(lsp_config),
5033 ..Default::default()
5034 },
5035 Some(tree_sitter_rust::language()),
5036 ));
5037
5038 let text = "
5039 fn a() { A }
5040 fn b() { BB }
5041 fn c() { CCC }
5042 "
5043 .unindent();
5044
5045 let fs = FakeFs::new(cx.background());
5046 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5047
5048 let project = Project::test(fs, cx);
5049 project.update(cx, |project, _| project.languages.add(language));
5050
5051 let worktree_id = project
5052 .update(cx, |project, cx| {
5053 project.find_or_create_local_worktree("/dir", true, cx)
5054 })
5055 .await
5056 .unwrap()
5057 .0
5058 .read_with(cx, |tree, _| tree.id());
5059
5060 let buffer = project
5061 .update(cx, |project, cx| {
5062 project.open_buffer((worktree_id, "a.rs"), cx)
5063 })
5064 .await
5065 .unwrap();
5066
5067 let mut fake_server = fake_servers.next().await.unwrap();
5068 let open_notification = fake_server
5069 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5070 .await;
5071
5072 // Edit the buffer, moving the content down
5073 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5074 let change_notification_1 = fake_server
5075 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5076 .await;
5077 assert!(
5078 change_notification_1.text_document.version > open_notification.text_document.version
5079 );
5080
5081 // Report some diagnostics for the initial version of the buffer
5082 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5083 lsp::PublishDiagnosticsParams {
5084 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5085 version: Some(open_notification.text_document.version),
5086 diagnostics: vec![
5087 lsp::Diagnostic {
5088 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5089 severity: Some(DiagnosticSeverity::ERROR),
5090 message: "undefined variable 'A'".to_string(),
5091 source: Some("disk".to_string()),
5092 ..Default::default()
5093 },
5094 lsp::Diagnostic {
5095 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5096 severity: Some(DiagnosticSeverity::ERROR),
5097 message: "undefined variable 'BB'".to_string(),
5098 source: Some("disk".to_string()),
5099 ..Default::default()
5100 },
5101 lsp::Diagnostic {
5102 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5103 severity: Some(DiagnosticSeverity::ERROR),
5104 source: Some("disk".to_string()),
5105 message: "undefined variable 'CCC'".to_string(),
5106 ..Default::default()
5107 },
5108 ],
5109 },
5110 );
5111
5112 // The diagnostics have moved down since they were created.
5113 buffer.next_notification(cx).await;
5114 buffer.read_with(cx, |buffer, _| {
5115 assert_eq!(
5116 buffer
5117 .snapshot()
5118 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5119 .collect::<Vec<_>>(),
5120 &[
5121 DiagnosticEntry {
5122 range: Point::new(3, 9)..Point::new(3, 11),
5123 diagnostic: Diagnostic {
5124 severity: DiagnosticSeverity::ERROR,
5125 message: "undefined variable 'BB'".to_string(),
5126 is_disk_based: true,
5127 group_id: 1,
5128 is_primary: true,
5129 ..Default::default()
5130 },
5131 },
5132 DiagnosticEntry {
5133 range: Point::new(4, 9)..Point::new(4, 12),
5134 diagnostic: Diagnostic {
5135 severity: DiagnosticSeverity::ERROR,
5136 message: "undefined variable 'CCC'".to_string(),
5137 is_disk_based: true,
5138 group_id: 2,
5139 is_primary: true,
5140 ..Default::default()
5141 }
5142 }
5143 ]
5144 );
5145 assert_eq!(
5146 chunks_with_diagnostics(buffer, 0..buffer.len()),
5147 [
5148 ("\n\nfn a() { ".to_string(), None),
5149 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5150 (" }\nfn b() { ".to_string(), None),
5151 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5152 (" }\nfn c() { ".to_string(), None),
5153 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5154 (" }\n".to_string(), None),
5155 ]
5156 );
5157 assert_eq!(
5158 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5159 [
5160 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5161 (" }\nfn c() { ".to_string(), None),
5162 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5163 ]
5164 );
5165 });
5166
5167 // Ensure overlapping diagnostics are highlighted correctly.
5168 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5169 lsp::PublishDiagnosticsParams {
5170 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5171 version: Some(open_notification.text_document.version),
5172 diagnostics: vec![
5173 lsp::Diagnostic {
5174 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5175 severity: Some(DiagnosticSeverity::ERROR),
5176 message: "undefined variable 'A'".to_string(),
5177 source: Some("disk".to_string()),
5178 ..Default::default()
5179 },
5180 lsp::Diagnostic {
5181 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5182 severity: Some(DiagnosticSeverity::WARNING),
5183 message: "unreachable statement".to_string(),
5184 source: Some("disk".to_string()),
5185 ..Default::default()
5186 },
5187 ],
5188 },
5189 );
5190
5191 buffer.next_notification(cx).await;
5192 buffer.read_with(cx, |buffer, _| {
5193 assert_eq!(
5194 buffer
5195 .snapshot()
5196 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5197 .collect::<Vec<_>>(),
5198 &[
5199 DiagnosticEntry {
5200 range: Point::new(2, 9)..Point::new(2, 12),
5201 diagnostic: Diagnostic {
5202 severity: DiagnosticSeverity::WARNING,
5203 message: "unreachable statement".to_string(),
5204 is_disk_based: true,
5205 group_id: 1,
5206 is_primary: true,
5207 ..Default::default()
5208 }
5209 },
5210 DiagnosticEntry {
5211 range: Point::new(2, 9)..Point::new(2, 10),
5212 diagnostic: Diagnostic {
5213 severity: DiagnosticSeverity::ERROR,
5214 message: "undefined variable 'A'".to_string(),
5215 is_disk_based: true,
5216 group_id: 0,
5217 is_primary: true,
5218 ..Default::default()
5219 },
5220 }
5221 ]
5222 );
5223 assert_eq!(
5224 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5225 [
5226 ("fn a() { ".to_string(), None),
5227 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5228 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5229 ("\n".to_string(), None),
5230 ]
5231 );
5232 assert_eq!(
5233 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5234 [
5235 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5236 ("\n".to_string(), None),
5237 ]
5238 );
5239 });
5240
5241 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5242 // changes since the last save.
5243 buffer.update(cx, |buffer, cx| {
5244 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5245 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5246 });
5247 let change_notification_2 =
5248 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5249 assert!(
5250 change_notification_2.await.text_document.version
5251 > change_notification_1.text_document.version
5252 );
5253
5254 // Handle out-of-order diagnostics
5255 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5256 lsp::PublishDiagnosticsParams {
5257 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5258 version: Some(open_notification.text_document.version),
5259 diagnostics: vec![
5260 lsp::Diagnostic {
5261 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5262 severity: Some(DiagnosticSeverity::ERROR),
5263 message: "undefined variable 'BB'".to_string(),
5264 source: Some("disk".to_string()),
5265 ..Default::default()
5266 },
5267 lsp::Diagnostic {
5268 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5269 severity: Some(DiagnosticSeverity::WARNING),
5270 message: "undefined variable 'A'".to_string(),
5271 source: Some("disk".to_string()),
5272 ..Default::default()
5273 },
5274 ],
5275 },
5276 );
5277
5278 buffer.next_notification(cx).await;
5279 buffer.read_with(cx, |buffer, _| {
5280 assert_eq!(
5281 buffer
5282 .snapshot()
5283 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5284 .collect::<Vec<_>>(),
5285 &[
5286 DiagnosticEntry {
5287 range: Point::new(2, 21)..Point::new(2, 22),
5288 diagnostic: Diagnostic {
5289 severity: DiagnosticSeverity::WARNING,
5290 message: "undefined variable 'A'".to_string(),
5291 is_disk_based: true,
5292 group_id: 1,
5293 is_primary: true,
5294 ..Default::default()
5295 }
5296 },
5297 DiagnosticEntry {
5298 range: Point::new(3, 9)..Point::new(3, 11),
5299 diagnostic: Diagnostic {
5300 severity: DiagnosticSeverity::ERROR,
5301 message: "undefined variable 'BB'".to_string(),
5302 is_disk_based: true,
5303 group_id: 0,
5304 is_primary: true,
5305 ..Default::default()
5306 },
5307 }
5308 ]
5309 );
5310 });
5311 }
5312
5313 #[gpui::test]
5314 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5315 cx.foreground().forbid_parking();
5316
5317 let text = concat!(
5318 "let one = ;\n", //
5319 "let two = \n",
5320 "let three = 3;\n",
5321 );
5322
5323 let fs = FakeFs::new(cx.background());
5324 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5325
5326 let project = Project::test(fs, cx);
5327 let worktree_id = project
5328 .update(cx, |project, cx| {
5329 project.find_or_create_local_worktree("/dir", true, cx)
5330 })
5331 .await
5332 .unwrap()
5333 .0
5334 .read_with(cx, |tree, _| tree.id());
5335
5336 let buffer = project
5337 .update(cx, |project, cx| {
5338 project.open_buffer((worktree_id, "a.rs"), cx)
5339 })
5340 .await
5341 .unwrap();
5342
5343 project.update(cx, |project, cx| {
5344 project
5345 .update_buffer_diagnostics(
5346 &buffer,
5347 vec![
5348 DiagnosticEntry {
5349 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5350 diagnostic: Diagnostic {
5351 severity: DiagnosticSeverity::ERROR,
5352 message: "syntax error 1".to_string(),
5353 ..Default::default()
5354 },
5355 },
5356 DiagnosticEntry {
5357 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5358 diagnostic: Diagnostic {
5359 severity: DiagnosticSeverity::ERROR,
5360 message: "syntax error 2".to_string(),
5361 ..Default::default()
5362 },
5363 },
5364 ],
5365 None,
5366 cx,
5367 )
5368 .unwrap();
5369 });
5370
5371 // An empty range is extended forward to include the following character.
5372 // At the end of a line, an empty range is extended backward to include
5373 // the preceding character.
5374 buffer.read_with(cx, |buffer, _| {
5375 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5376 assert_eq!(
5377 chunks
5378 .iter()
5379 .map(|(s, d)| (s.as_str(), *d))
5380 .collect::<Vec<_>>(),
5381 &[
5382 ("let one = ", None),
5383 (";", Some(DiagnosticSeverity::ERROR)),
5384 ("\nlet two =", None),
5385 (" ", Some(DiagnosticSeverity::ERROR)),
5386 ("\nlet three = 3;\n", None)
5387 ]
5388 );
5389 });
5390 }
5391
5392 #[gpui::test]
5393 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5394 cx.foreground().forbid_parking();
5395
5396 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5397 let language = Arc::new(Language::new(
5398 LanguageConfig {
5399 name: "Rust".into(),
5400 path_suffixes: vec!["rs".to_string()],
5401 language_server: Some(lsp_config),
5402 ..Default::default()
5403 },
5404 Some(tree_sitter_rust::language()),
5405 ));
5406
5407 let text = "
5408 fn a() {
5409 f1();
5410 }
5411 fn b() {
5412 f2();
5413 }
5414 fn c() {
5415 f3();
5416 }
5417 "
5418 .unindent();
5419
5420 let fs = FakeFs::new(cx.background());
5421 fs.insert_tree(
5422 "/dir",
5423 json!({
5424 "a.rs": text.clone(),
5425 }),
5426 )
5427 .await;
5428
5429 let project = Project::test(fs, cx);
5430 project.update(cx, |project, _| project.languages.add(language));
5431
5432 let worktree_id = project
5433 .update(cx, |project, cx| {
5434 project.find_or_create_local_worktree("/dir", true, cx)
5435 })
5436 .await
5437 .unwrap()
5438 .0
5439 .read_with(cx, |tree, _| tree.id());
5440
5441 let buffer = project
5442 .update(cx, |project, cx| {
5443 project.open_buffer((worktree_id, "a.rs"), cx)
5444 })
5445 .await
5446 .unwrap();
5447
5448 let mut fake_server = fake_servers.next().await.unwrap();
5449 let lsp_document_version = fake_server
5450 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5451 .await
5452 .text_document
5453 .version;
5454
5455 // Simulate editing the buffer after the language server computes some edits.
5456 buffer.update(cx, |buffer, cx| {
5457 buffer.edit(
5458 [Point::new(0, 0)..Point::new(0, 0)],
5459 "// above first function\n",
5460 cx,
5461 );
5462 buffer.edit(
5463 [Point::new(2, 0)..Point::new(2, 0)],
5464 " // inside first function\n",
5465 cx,
5466 );
5467 buffer.edit(
5468 [Point::new(6, 4)..Point::new(6, 4)],
5469 "// inside second function ",
5470 cx,
5471 );
5472
5473 assert_eq!(
5474 buffer.text(),
5475 "
5476 // above first function
5477 fn a() {
5478 // inside first function
5479 f1();
5480 }
5481 fn b() {
5482 // inside second function f2();
5483 }
5484 fn c() {
5485 f3();
5486 }
5487 "
5488 .unindent()
5489 );
5490 });
5491
5492 let edits = project
5493 .update(cx, |project, cx| {
5494 project.edits_from_lsp(
5495 &buffer,
5496 vec![
5497 // replace body of first function
5498 lsp::TextEdit {
5499 range: lsp::Range::new(
5500 lsp::Position::new(0, 0),
5501 lsp::Position::new(3, 0),
5502 ),
5503 new_text: "
5504 fn a() {
5505 f10();
5506 }
5507 "
5508 .unindent(),
5509 },
5510 // edit inside second function
5511 lsp::TextEdit {
5512 range: lsp::Range::new(
5513 lsp::Position::new(4, 6),
5514 lsp::Position::new(4, 6),
5515 ),
5516 new_text: "00".into(),
5517 },
5518 // edit inside third function via two distinct edits
5519 lsp::TextEdit {
5520 range: lsp::Range::new(
5521 lsp::Position::new(7, 5),
5522 lsp::Position::new(7, 5),
5523 ),
5524 new_text: "4000".into(),
5525 },
5526 lsp::TextEdit {
5527 range: lsp::Range::new(
5528 lsp::Position::new(7, 5),
5529 lsp::Position::new(7, 6),
5530 ),
5531 new_text: "".into(),
5532 },
5533 ],
5534 Some(lsp_document_version),
5535 cx,
5536 )
5537 })
5538 .await
5539 .unwrap();
5540
5541 buffer.update(cx, |buffer, cx| {
5542 for (range, new_text) in edits {
5543 buffer.edit([range], new_text, cx);
5544 }
5545 assert_eq!(
5546 buffer.text(),
5547 "
5548 // above first function
5549 fn a() {
5550 // inside first function
5551 f10();
5552 }
5553 fn b() {
5554 // inside second function f200();
5555 }
5556 fn c() {
5557 f4000();
5558 }
5559 "
5560 .unindent()
5561 );
5562 });
5563 }
5564
5565 #[gpui::test]
5566 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5567 cx.foreground().forbid_parking();
5568
5569 let text = "
5570 use a::b;
5571 use a::c;
5572
5573 fn f() {
5574 b();
5575 c();
5576 }
5577 "
5578 .unindent();
5579
5580 let fs = FakeFs::new(cx.background());
5581 fs.insert_tree(
5582 "/dir",
5583 json!({
5584 "a.rs": text.clone(),
5585 }),
5586 )
5587 .await;
5588
5589 let project = Project::test(fs, cx);
5590 let worktree_id = project
5591 .update(cx, |project, cx| {
5592 project.find_or_create_local_worktree("/dir", true, cx)
5593 })
5594 .await
5595 .unwrap()
5596 .0
5597 .read_with(cx, |tree, _| tree.id());
5598
5599 let buffer = project
5600 .update(cx, |project, cx| {
5601 project.open_buffer((worktree_id, "a.rs"), cx)
5602 })
5603 .await
5604 .unwrap();
5605
5606 // Simulate the language server sending us a small edit in the form of a very large diff.
5607 // Rust-analyzer does this when performing a merge-imports code action.
5608 let edits = project
5609 .update(cx, |project, cx| {
5610 project.edits_from_lsp(
5611 &buffer,
5612 [
5613 // Replace the first use statement without editing the semicolon.
5614 lsp::TextEdit {
5615 range: lsp::Range::new(
5616 lsp::Position::new(0, 4),
5617 lsp::Position::new(0, 8),
5618 ),
5619 new_text: "a::{b, c}".into(),
5620 },
5621 // Reinsert the remainder of the file between the semicolon and the final
5622 // newline of the file.
5623 lsp::TextEdit {
5624 range: lsp::Range::new(
5625 lsp::Position::new(0, 9),
5626 lsp::Position::new(0, 9),
5627 ),
5628 new_text: "\n\n".into(),
5629 },
5630 lsp::TextEdit {
5631 range: lsp::Range::new(
5632 lsp::Position::new(0, 9),
5633 lsp::Position::new(0, 9),
5634 ),
5635 new_text: "
5636 fn f() {
5637 b();
5638 c();
5639 }"
5640 .unindent(),
5641 },
5642 // Delete everything after the first newline of the file.
5643 lsp::TextEdit {
5644 range: lsp::Range::new(
5645 lsp::Position::new(1, 0),
5646 lsp::Position::new(7, 0),
5647 ),
5648 new_text: "".into(),
5649 },
5650 ],
5651 None,
5652 cx,
5653 )
5654 })
5655 .await
5656 .unwrap();
5657
5658 buffer.update(cx, |buffer, cx| {
5659 let edits = edits
5660 .into_iter()
5661 .map(|(range, text)| {
5662 (
5663 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5664 text,
5665 )
5666 })
5667 .collect::<Vec<_>>();
5668
5669 assert_eq!(
5670 edits,
5671 [
5672 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5673 (Point::new(1, 0)..Point::new(2, 0), "".into())
5674 ]
5675 );
5676
5677 for (range, new_text) in edits {
5678 buffer.edit([range], new_text, cx);
5679 }
5680 assert_eq!(
5681 buffer.text(),
5682 "
5683 use a::{b, c};
5684
5685 fn f() {
5686 b();
5687 c();
5688 }
5689 "
5690 .unindent()
5691 );
5692 });
5693 }
5694
5695 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5696 buffer: &Buffer,
5697 range: Range<T>,
5698 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5699 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5700 for chunk in buffer.snapshot().chunks(range, true) {
5701 if chunks.last().map_or(false, |prev_chunk| {
5702 prev_chunk.1 == chunk.diagnostic_severity
5703 }) {
5704 chunks.last_mut().unwrap().0.push_str(chunk.text);
5705 } else {
5706 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5707 }
5708 }
5709 chunks
5710 }
5711
5712 #[gpui::test]
5713 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5714 let dir = temp_tree(json!({
5715 "root": {
5716 "dir1": {},
5717 "dir2": {
5718 "dir3": {}
5719 }
5720 }
5721 }));
5722
5723 let project = Project::test(Arc::new(RealFs), cx);
5724 let (tree, _) = project
5725 .update(cx, |project, cx| {
5726 project.find_or_create_local_worktree(&dir.path(), true, cx)
5727 })
5728 .await
5729 .unwrap();
5730
5731 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5732 .await;
5733
5734 let cancel_flag = Default::default();
5735 let results = project
5736 .read_with(cx, |project, cx| {
5737 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5738 })
5739 .await;
5740
5741 assert!(results.is_empty());
5742 }
5743
5744 #[gpui::test]
5745 async fn test_definition(cx: &mut gpui::TestAppContext) {
5746 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5747 let language = Arc::new(Language::new(
5748 LanguageConfig {
5749 name: "Rust".into(),
5750 path_suffixes: vec!["rs".to_string()],
5751 language_server: Some(language_server_config),
5752 ..Default::default()
5753 },
5754 Some(tree_sitter_rust::language()),
5755 ));
5756
5757 let fs = FakeFs::new(cx.background());
5758 fs.insert_tree(
5759 "/dir",
5760 json!({
5761 "a.rs": "const fn a() { A }",
5762 "b.rs": "const y: i32 = crate::a()",
5763 }),
5764 )
5765 .await;
5766
5767 let project = Project::test(fs, cx);
5768 project.update(cx, |project, _| {
5769 Arc::get_mut(&mut project.languages).unwrap().add(language);
5770 });
5771
5772 let (tree, _) = project
5773 .update(cx, |project, cx| {
5774 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5775 })
5776 .await
5777 .unwrap();
5778 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5779 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5780 .await;
5781
5782 let buffer = project
5783 .update(cx, |project, cx| {
5784 project.open_buffer(
5785 ProjectPath {
5786 worktree_id,
5787 path: Path::new("").into(),
5788 },
5789 cx,
5790 )
5791 })
5792 .await
5793 .unwrap();
5794
5795 let mut fake_server = fake_servers.next().await.unwrap();
5796 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5797 let params = params.text_document_position_params;
5798 assert_eq!(
5799 params.text_document.uri.to_file_path().unwrap(),
5800 Path::new("/dir/b.rs"),
5801 );
5802 assert_eq!(params.position, lsp::Position::new(0, 22));
5803
5804 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5805 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5806 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5807 )))
5808 });
5809
5810 let mut definitions = project
5811 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5812 .await
5813 .unwrap();
5814
5815 assert_eq!(definitions.len(), 1);
5816 let definition = definitions.pop().unwrap();
5817 cx.update(|cx| {
5818 let target_buffer = definition.buffer.read(cx);
5819 assert_eq!(
5820 target_buffer
5821 .file()
5822 .unwrap()
5823 .as_local()
5824 .unwrap()
5825 .abs_path(cx),
5826 Path::new("/dir/a.rs"),
5827 );
5828 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5829 assert_eq!(
5830 list_worktrees(&project, cx),
5831 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5832 );
5833
5834 drop(definition);
5835 });
5836 cx.read(|cx| {
5837 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5838 });
5839
5840 fn list_worktrees<'a>(
5841 project: &'a ModelHandle<Project>,
5842 cx: &'a AppContext,
5843 ) -> Vec<(&'a Path, bool)> {
5844 project
5845 .read(cx)
5846 .worktrees(cx)
5847 .map(|worktree| {
5848 let worktree = worktree.read(cx);
5849 (
5850 worktree.as_local().unwrap().abs_path().as_ref(),
5851 worktree.is_visible(),
5852 )
5853 })
5854 .collect::<Vec<_>>()
5855 }
5856 }
5857
5858 #[gpui::test]
5859 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5860 let fs = FakeFs::new(cx.background());
5861 fs.insert_tree(
5862 "/dir",
5863 json!({
5864 "file1": "the old contents",
5865 }),
5866 )
5867 .await;
5868
5869 let project = Project::test(fs.clone(), cx);
5870 let worktree_id = project
5871 .update(cx, |p, cx| {
5872 p.find_or_create_local_worktree("/dir", true, cx)
5873 })
5874 .await
5875 .unwrap()
5876 .0
5877 .read_with(cx, |tree, _| tree.id());
5878
5879 let buffer = project
5880 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5881 .await
5882 .unwrap();
5883 buffer
5884 .update(cx, |buffer, cx| {
5885 assert_eq!(buffer.text(), "the old contents");
5886 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5887 buffer.save(cx)
5888 })
5889 .await
5890 .unwrap();
5891
5892 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5893 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5894 }
5895
5896 #[gpui::test]
5897 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5898 let fs = FakeFs::new(cx.background());
5899 fs.insert_tree(
5900 "/dir",
5901 json!({
5902 "file1": "the old contents",
5903 }),
5904 )
5905 .await;
5906
5907 let project = Project::test(fs.clone(), cx);
5908 let worktree_id = project
5909 .update(cx, |p, cx| {
5910 p.find_or_create_local_worktree("/dir/file1", true, cx)
5911 })
5912 .await
5913 .unwrap()
5914 .0
5915 .read_with(cx, |tree, _| tree.id());
5916
5917 let buffer = project
5918 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5919 .await
5920 .unwrap();
5921 buffer
5922 .update(cx, |buffer, cx| {
5923 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5924 buffer.save(cx)
5925 })
5926 .await
5927 .unwrap();
5928
5929 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5930 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5931 }
5932
5933 #[gpui::test]
5934 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5935 let fs = FakeFs::new(cx.background());
5936 fs.insert_tree("/dir", json!({})).await;
5937
5938 let project = Project::test(fs.clone(), cx);
5939 let (worktree, _) = project
5940 .update(cx, |project, cx| {
5941 project.find_or_create_local_worktree("/dir", true, cx)
5942 })
5943 .await
5944 .unwrap();
5945 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5946
5947 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5948 buffer.update(cx, |buffer, cx| {
5949 buffer.edit([0..0], "abc", cx);
5950 assert!(buffer.is_dirty());
5951 assert!(!buffer.has_conflict());
5952 });
5953 project
5954 .update(cx, |project, cx| {
5955 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5956 })
5957 .await
5958 .unwrap();
5959 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5960 buffer.read_with(cx, |buffer, cx| {
5961 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5962 assert!(!buffer.is_dirty());
5963 assert!(!buffer.has_conflict());
5964 });
5965
5966 let opened_buffer = project
5967 .update(cx, |project, cx| {
5968 project.open_buffer((worktree_id, "file1"), cx)
5969 })
5970 .await
5971 .unwrap();
5972 assert_eq!(opened_buffer, buffer);
5973 }
5974
5975 #[gpui::test(retries = 5)]
5976 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5977 let dir = temp_tree(json!({
5978 "a": {
5979 "file1": "",
5980 "file2": "",
5981 "file3": "",
5982 },
5983 "b": {
5984 "c": {
5985 "file4": "",
5986 "file5": "",
5987 }
5988 }
5989 }));
5990
5991 let project = Project::test(Arc::new(RealFs), cx);
5992 let rpc = project.read_with(cx, |p, _| p.client.clone());
5993
5994 let (tree, _) = project
5995 .update(cx, |p, cx| {
5996 p.find_or_create_local_worktree(dir.path(), true, cx)
5997 })
5998 .await
5999 .unwrap();
6000 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6001
6002 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6003 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6004 async move { buffer.await.unwrap() }
6005 };
6006 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6007 tree.read_with(cx, |tree, _| {
6008 tree.entry_for_path(path)
6009 .expect(&format!("no entry for path {}", path))
6010 .id
6011 })
6012 };
6013
6014 let buffer2 = buffer_for_path("a/file2", cx).await;
6015 let buffer3 = buffer_for_path("a/file3", cx).await;
6016 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6017 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6018
6019 let file2_id = id_for_path("a/file2", &cx);
6020 let file3_id = id_for_path("a/file3", &cx);
6021 let file4_id = id_for_path("b/c/file4", &cx);
6022
6023 // Wait for the initial scan.
6024 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6025 .await;
6026
6027 // Create a remote copy of this worktree.
6028 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6029 let (remote, load_task) = cx.update(|cx| {
6030 Worktree::remote(
6031 1,
6032 1,
6033 initial_snapshot.to_proto(&Default::default(), true),
6034 rpc.clone(),
6035 cx,
6036 )
6037 });
6038 load_task.await;
6039
6040 cx.read(|cx| {
6041 assert!(!buffer2.read(cx).is_dirty());
6042 assert!(!buffer3.read(cx).is_dirty());
6043 assert!(!buffer4.read(cx).is_dirty());
6044 assert!(!buffer5.read(cx).is_dirty());
6045 });
6046
6047 // Rename and delete files and directories.
6048 tree.flush_fs_events(&cx).await;
6049 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6050 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6051 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6052 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6053 tree.flush_fs_events(&cx).await;
6054
6055 let expected_paths = vec![
6056 "a",
6057 "a/file1",
6058 "a/file2.new",
6059 "b",
6060 "d",
6061 "d/file3",
6062 "d/file4",
6063 ];
6064
6065 cx.read(|app| {
6066 assert_eq!(
6067 tree.read(app)
6068 .paths()
6069 .map(|p| p.to_str().unwrap())
6070 .collect::<Vec<_>>(),
6071 expected_paths
6072 );
6073
6074 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6075 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6076 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6077
6078 assert_eq!(
6079 buffer2.read(app).file().unwrap().path().as_ref(),
6080 Path::new("a/file2.new")
6081 );
6082 assert_eq!(
6083 buffer3.read(app).file().unwrap().path().as_ref(),
6084 Path::new("d/file3")
6085 );
6086 assert_eq!(
6087 buffer4.read(app).file().unwrap().path().as_ref(),
6088 Path::new("d/file4")
6089 );
6090 assert_eq!(
6091 buffer5.read(app).file().unwrap().path().as_ref(),
6092 Path::new("b/c/file5")
6093 );
6094
6095 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6096 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6097 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6098 assert!(buffer5.read(app).file().unwrap().is_deleted());
6099 });
6100
6101 // Update the remote worktree. Check that it becomes consistent with the
6102 // local worktree.
6103 remote.update(cx, |remote, cx| {
6104 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6105 &initial_snapshot,
6106 1,
6107 1,
6108 true,
6109 );
6110 remote
6111 .as_remote_mut()
6112 .unwrap()
6113 .snapshot
6114 .apply_remote_update(update_message)
6115 .unwrap();
6116
6117 assert_eq!(
6118 remote
6119 .paths()
6120 .map(|p| p.to_str().unwrap())
6121 .collect::<Vec<_>>(),
6122 expected_paths
6123 );
6124 });
6125 }
6126
6127 #[gpui::test]
6128 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6129 let fs = FakeFs::new(cx.background());
6130 fs.insert_tree(
6131 "/the-dir",
6132 json!({
6133 "a.txt": "a-contents",
6134 "b.txt": "b-contents",
6135 }),
6136 )
6137 .await;
6138
6139 let project = Project::test(fs.clone(), cx);
6140 let worktree_id = project
6141 .update(cx, |p, cx| {
6142 p.find_or_create_local_worktree("/the-dir", true, cx)
6143 })
6144 .await
6145 .unwrap()
6146 .0
6147 .read_with(cx, |tree, _| tree.id());
6148
6149 // Spawn multiple tasks to open paths, repeating some paths.
6150 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6151 (
6152 p.open_buffer((worktree_id, "a.txt"), cx),
6153 p.open_buffer((worktree_id, "b.txt"), cx),
6154 p.open_buffer((worktree_id, "a.txt"), cx),
6155 )
6156 });
6157
6158 let buffer_a_1 = buffer_a_1.await.unwrap();
6159 let buffer_a_2 = buffer_a_2.await.unwrap();
6160 let buffer_b = buffer_b.await.unwrap();
6161 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6162 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6163
6164 // There is only one buffer per path.
6165 let buffer_a_id = buffer_a_1.id();
6166 assert_eq!(buffer_a_2.id(), buffer_a_id);
6167
6168 // Open the same path again while it is still open.
6169 drop(buffer_a_1);
6170 let buffer_a_3 = project
6171 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6172 .await
6173 .unwrap();
6174
6175 // There's still only one buffer per path.
6176 assert_eq!(buffer_a_3.id(), buffer_a_id);
6177 }
6178
6179 #[gpui::test]
6180 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6181 use std::fs;
6182
6183 let dir = temp_tree(json!({
6184 "file1": "abc",
6185 "file2": "def",
6186 "file3": "ghi",
6187 }));
6188
6189 let project = Project::test(Arc::new(RealFs), cx);
6190 let (worktree, _) = project
6191 .update(cx, |p, cx| {
6192 p.find_or_create_local_worktree(dir.path(), true, cx)
6193 })
6194 .await
6195 .unwrap();
6196 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6197
6198 worktree.flush_fs_events(&cx).await;
6199 worktree
6200 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6201 .await;
6202
6203 let buffer1 = project
6204 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6205 .await
6206 .unwrap();
6207 let events = Rc::new(RefCell::new(Vec::new()));
6208
6209 // initially, the buffer isn't dirty.
6210 buffer1.update(cx, |buffer, cx| {
6211 cx.subscribe(&buffer1, {
6212 let events = events.clone();
6213 move |_, _, event, _| match event {
6214 BufferEvent::Operation(_) => {}
6215 _ => events.borrow_mut().push(event.clone()),
6216 }
6217 })
6218 .detach();
6219
6220 assert!(!buffer.is_dirty());
6221 assert!(events.borrow().is_empty());
6222
6223 buffer.edit(vec![1..2], "", cx);
6224 });
6225
6226 // after the first edit, the buffer is dirty, and emits a dirtied event.
6227 buffer1.update(cx, |buffer, cx| {
6228 assert!(buffer.text() == "ac");
6229 assert!(buffer.is_dirty());
6230 assert_eq!(
6231 *events.borrow(),
6232 &[
6233 language::Event::Edited { local: true },
6234 language::Event::Dirtied
6235 ]
6236 );
6237 events.borrow_mut().clear();
6238 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6239 });
6240
6241 // after saving, the buffer is not dirty, and emits a saved event.
6242 buffer1.update(cx, |buffer, cx| {
6243 assert!(!buffer.is_dirty());
6244 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6245 events.borrow_mut().clear();
6246
6247 buffer.edit(vec![1..1], "B", cx);
6248 buffer.edit(vec![2..2], "D", cx);
6249 });
6250
6251 // after editing again, the buffer is dirty, and emits another dirty event.
6252 buffer1.update(cx, |buffer, cx| {
6253 assert!(buffer.text() == "aBDc");
6254 assert!(buffer.is_dirty());
6255 assert_eq!(
6256 *events.borrow(),
6257 &[
6258 language::Event::Edited { local: true },
6259 language::Event::Dirtied,
6260 language::Event::Edited { local: true },
6261 ],
6262 );
6263 events.borrow_mut().clear();
6264
6265 // TODO - currently, after restoring the buffer to its
6266 // previously-saved state, the is still considered dirty.
6267 buffer.edit([1..3], "", cx);
6268 assert!(buffer.text() == "ac");
6269 assert!(buffer.is_dirty());
6270 });
6271
6272 assert_eq!(*events.borrow(), &[language::Event::Edited { local: true }]);
6273
6274 // When a file is deleted, the buffer is considered dirty.
6275 let events = Rc::new(RefCell::new(Vec::new()));
6276 let buffer2 = project
6277 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6278 .await
6279 .unwrap();
6280 buffer2.update(cx, |_, cx| {
6281 cx.subscribe(&buffer2, {
6282 let events = events.clone();
6283 move |_, _, event, _| events.borrow_mut().push(event.clone())
6284 })
6285 .detach();
6286 });
6287
6288 fs::remove_file(dir.path().join("file2")).unwrap();
6289 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6290 assert_eq!(
6291 *events.borrow(),
6292 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6293 );
6294
6295 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6296 let events = Rc::new(RefCell::new(Vec::new()));
6297 let buffer3 = project
6298 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6299 .await
6300 .unwrap();
6301 buffer3.update(cx, |_, cx| {
6302 cx.subscribe(&buffer3, {
6303 let events = events.clone();
6304 move |_, _, event, _| events.borrow_mut().push(event.clone())
6305 })
6306 .detach();
6307 });
6308
6309 worktree.flush_fs_events(&cx).await;
6310 buffer3.update(cx, |buffer, cx| {
6311 buffer.edit(Some(0..0), "x", cx);
6312 });
6313 events.borrow_mut().clear();
6314 fs::remove_file(dir.path().join("file3")).unwrap();
6315 buffer3
6316 .condition(&cx, |_, _| !events.borrow().is_empty())
6317 .await;
6318 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6319 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6320 }
6321
6322 #[gpui::test]
6323 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6324 use std::fs;
6325
6326 let initial_contents = "aaa\nbbbbb\nc\n";
6327 let dir = temp_tree(json!({ "the-file": initial_contents }));
6328
6329 let project = Project::test(Arc::new(RealFs), cx);
6330 let (worktree, _) = project
6331 .update(cx, |p, cx| {
6332 p.find_or_create_local_worktree(dir.path(), true, cx)
6333 })
6334 .await
6335 .unwrap();
6336 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6337
6338 worktree
6339 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6340 .await;
6341
6342 let abs_path = dir.path().join("the-file");
6343 let buffer = project
6344 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6345 .await
6346 .unwrap();
6347
6348 // TODO
6349 // Add a cursor on each row.
6350 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6351 // assert!(!buffer.is_dirty());
6352 // buffer.add_selection_set(
6353 // &(0..3)
6354 // .map(|row| Selection {
6355 // id: row as usize,
6356 // start: Point::new(row, 1),
6357 // end: Point::new(row, 1),
6358 // reversed: false,
6359 // goal: SelectionGoal::None,
6360 // })
6361 // .collect::<Vec<_>>(),
6362 // cx,
6363 // )
6364 // });
6365
6366 // Change the file on disk, adding two new lines of text, and removing
6367 // one line.
6368 buffer.read_with(cx, |buffer, _| {
6369 assert!(!buffer.is_dirty());
6370 assert!(!buffer.has_conflict());
6371 });
6372 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6373 fs::write(&abs_path, new_contents).unwrap();
6374
6375 // Because the buffer was not modified, it is reloaded from disk. Its
6376 // contents are edited according to the diff between the old and new
6377 // file contents.
6378 buffer
6379 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6380 .await;
6381
6382 buffer.update(cx, |buffer, _| {
6383 assert_eq!(buffer.text(), new_contents);
6384 assert!(!buffer.is_dirty());
6385 assert!(!buffer.has_conflict());
6386
6387 // TODO
6388 // let cursor_positions = buffer
6389 // .selection_set(selection_set_id)
6390 // .unwrap()
6391 // .selections::<Point>(&*buffer)
6392 // .map(|selection| {
6393 // assert_eq!(selection.start, selection.end);
6394 // selection.start
6395 // })
6396 // .collect::<Vec<_>>();
6397 // assert_eq!(
6398 // cursor_positions,
6399 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6400 // );
6401 });
6402
6403 // Modify the buffer
6404 buffer.update(cx, |buffer, cx| {
6405 buffer.edit(vec![0..0], " ", cx);
6406 assert!(buffer.is_dirty());
6407 assert!(!buffer.has_conflict());
6408 });
6409
6410 // Change the file on disk again, adding blank lines to the beginning.
6411 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6412
6413 // Because the buffer is modified, it doesn't reload from disk, but is
6414 // marked as having a conflict.
6415 buffer
6416 .condition(&cx, |buffer, _| buffer.has_conflict())
6417 .await;
6418 }
6419
6420 #[gpui::test]
6421 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6422 cx.foreground().forbid_parking();
6423
6424 let fs = FakeFs::new(cx.background());
6425 fs.insert_tree(
6426 "/the-dir",
6427 json!({
6428 "a.rs": "
6429 fn foo(mut v: Vec<usize>) {
6430 for x in &v {
6431 v.push(1);
6432 }
6433 }
6434 "
6435 .unindent(),
6436 }),
6437 )
6438 .await;
6439
6440 let project = Project::test(fs.clone(), cx);
6441 let (worktree, _) = project
6442 .update(cx, |p, cx| {
6443 p.find_or_create_local_worktree("/the-dir", true, cx)
6444 })
6445 .await
6446 .unwrap();
6447 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6448
6449 let buffer = project
6450 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6451 .await
6452 .unwrap();
6453
6454 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6455 let message = lsp::PublishDiagnosticsParams {
6456 uri: buffer_uri.clone(),
6457 diagnostics: vec![
6458 lsp::Diagnostic {
6459 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6460 severity: Some(DiagnosticSeverity::WARNING),
6461 message: "error 1".to_string(),
6462 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6463 location: lsp::Location {
6464 uri: buffer_uri.clone(),
6465 range: lsp::Range::new(
6466 lsp::Position::new(1, 8),
6467 lsp::Position::new(1, 9),
6468 ),
6469 },
6470 message: "error 1 hint 1".to_string(),
6471 }]),
6472 ..Default::default()
6473 },
6474 lsp::Diagnostic {
6475 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6476 severity: Some(DiagnosticSeverity::HINT),
6477 message: "error 1 hint 1".to_string(),
6478 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6479 location: lsp::Location {
6480 uri: buffer_uri.clone(),
6481 range: lsp::Range::new(
6482 lsp::Position::new(1, 8),
6483 lsp::Position::new(1, 9),
6484 ),
6485 },
6486 message: "original diagnostic".to_string(),
6487 }]),
6488 ..Default::default()
6489 },
6490 lsp::Diagnostic {
6491 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6492 severity: Some(DiagnosticSeverity::ERROR),
6493 message: "error 2".to_string(),
6494 related_information: Some(vec![
6495 lsp::DiagnosticRelatedInformation {
6496 location: lsp::Location {
6497 uri: buffer_uri.clone(),
6498 range: lsp::Range::new(
6499 lsp::Position::new(1, 13),
6500 lsp::Position::new(1, 15),
6501 ),
6502 },
6503 message: "error 2 hint 1".to_string(),
6504 },
6505 lsp::DiagnosticRelatedInformation {
6506 location: lsp::Location {
6507 uri: buffer_uri.clone(),
6508 range: lsp::Range::new(
6509 lsp::Position::new(1, 13),
6510 lsp::Position::new(1, 15),
6511 ),
6512 },
6513 message: "error 2 hint 2".to_string(),
6514 },
6515 ]),
6516 ..Default::default()
6517 },
6518 lsp::Diagnostic {
6519 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6520 severity: Some(DiagnosticSeverity::HINT),
6521 message: "error 2 hint 1".to_string(),
6522 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6523 location: lsp::Location {
6524 uri: buffer_uri.clone(),
6525 range: lsp::Range::new(
6526 lsp::Position::new(2, 8),
6527 lsp::Position::new(2, 17),
6528 ),
6529 },
6530 message: "original diagnostic".to_string(),
6531 }]),
6532 ..Default::default()
6533 },
6534 lsp::Diagnostic {
6535 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6536 severity: Some(DiagnosticSeverity::HINT),
6537 message: "error 2 hint 2".to_string(),
6538 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6539 location: lsp::Location {
6540 uri: buffer_uri.clone(),
6541 range: lsp::Range::new(
6542 lsp::Position::new(2, 8),
6543 lsp::Position::new(2, 17),
6544 ),
6545 },
6546 message: "original diagnostic".to_string(),
6547 }]),
6548 ..Default::default()
6549 },
6550 ],
6551 version: None,
6552 };
6553
6554 project
6555 .update(cx, |p, cx| {
6556 p.update_diagnostics(message, &Default::default(), cx)
6557 })
6558 .unwrap();
6559 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6560
6561 assert_eq!(
6562 buffer
6563 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6564 .collect::<Vec<_>>(),
6565 &[
6566 DiagnosticEntry {
6567 range: Point::new(1, 8)..Point::new(1, 9),
6568 diagnostic: Diagnostic {
6569 severity: DiagnosticSeverity::WARNING,
6570 message: "error 1".to_string(),
6571 group_id: 0,
6572 is_primary: true,
6573 ..Default::default()
6574 }
6575 },
6576 DiagnosticEntry {
6577 range: Point::new(1, 8)..Point::new(1, 9),
6578 diagnostic: Diagnostic {
6579 severity: DiagnosticSeverity::HINT,
6580 message: "error 1 hint 1".to_string(),
6581 group_id: 0,
6582 is_primary: false,
6583 ..Default::default()
6584 }
6585 },
6586 DiagnosticEntry {
6587 range: Point::new(1, 13)..Point::new(1, 15),
6588 diagnostic: Diagnostic {
6589 severity: DiagnosticSeverity::HINT,
6590 message: "error 2 hint 1".to_string(),
6591 group_id: 1,
6592 is_primary: false,
6593 ..Default::default()
6594 }
6595 },
6596 DiagnosticEntry {
6597 range: Point::new(1, 13)..Point::new(1, 15),
6598 diagnostic: Diagnostic {
6599 severity: DiagnosticSeverity::HINT,
6600 message: "error 2 hint 2".to_string(),
6601 group_id: 1,
6602 is_primary: false,
6603 ..Default::default()
6604 }
6605 },
6606 DiagnosticEntry {
6607 range: Point::new(2, 8)..Point::new(2, 17),
6608 diagnostic: Diagnostic {
6609 severity: DiagnosticSeverity::ERROR,
6610 message: "error 2".to_string(),
6611 group_id: 1,
6612 is_primary: true,
6613 ..Default::default()
6614 }
6615 }
6616 ]
6617 );
6618
6619 assert_eq!(
6620 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6621 &[
6622 DiagnosticEntry {
6623 range: Point::new(1, 8)..Point::new(1, 9),
6624 diagnostic: Diagnostic {
6625 severity: DiagnosticSeverity::WARNING,
6626 message: "error 1".to_string(),
6627 group_id: 0,
6628 is_primary: true,
6629 ..Default::default()
6630 }
6631 },
6632 DiagnosticEntry {
6633 range: Point::new(1, 8)..Point::new(1, 9),
6634 diagnostic: Diagnostic {
6635 severity: DiagnosticSeverity::HINT,
6636 message: "error 1 hint 1".to_string(),
6637 group_id: 0,
6638 is_primary: false,
6639 ..Default::default()
6640 }
6641 },
6642 ]
6643 );
6644 assert_eq!(
6645 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6646 &[
6647 DiagnosticEntry {
6648 range: Point::new(1, 13)..Point::new(1, 15),
6649 diagnostic: Diagnostic {
6650 severity: DiagnosticSeverity::HINT,
6651 message: "error 2 hint 1".to_string(),
6652 group_id: 1,
6653 is_primary: false,
6654 ..Default::default()
6655 }
6656 },
6657 DiagnosticEntry {
6658 range: Point::new(1, 13)..Point::new(1, 15),
6659 diagnostic: Diagnostic {
6660 severity: DiagnosticSeverity::HINT,
6661 message: "error 2 hint 2".to_string(),
6662 group_id: 1,
6663 is_primary: false,
6664 ..Default::default()
6665 }
6666 },
6667 DiagnosticEntry {
6668 range: Point::new(2, 8)..Point::new(2, 17),
6669 diagnostic: Diagnostic {
6670 severity: DiagnosticSeverity::ERROR,
6671 message: "error 2".to_string(),
6672 group_id: 1,
6673 is_primary: true,
6674 ..Default::default()
6675 }
6676 }
6677 ]
6678 );
6679 }
6680
6681 #[gpui::test]
6682 async fn test_rename(cx: &mut gpui::TestAppContext) {
6683 cx.foreground().forbid_parking();
6684
6685 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6686 let language = Arc::new(Language::new(
6687 LanguageConfig {
6688 name: "Rust".into(),
6689 path_suffixes: vec!["rs".to_string()],
6690 language_server: Some(language_server_config),
6691 ..Default::default()
6692 },
6693 Some(tree_sitter_rust::language()),
6694 ));
6695
6696 let fs = FakeFs::new(cx.background());
6697 fs.insert_tree(
6698 "/dir",
6699 json!({
6700 "one.rs": "const ONE: usize = 1;",
6701 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6702 }),
6703 )
6704 .await;
6705
6706 let project = Project::test(fs.clone(), cx);
6707 project.update(cx, |project, _| {
6708 Arc::get_mut(&mut project.languages).unwrap().add(language);
6709 });
6710
6711 let (tree, _) = project
6712 .update(cx, |project, cx| {
6713 project.find_or_create_local_worktree("/dir", true, cx)
6714 })
6715 .await
6716 .unwrap();
6717 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6718 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6719 .await;
6720
6721 let buffer = project
6722 .update(cx, |project, cx| {
6723 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6724 })
6725 .await
6726 .unwrap();
6727
6728 let mut fake_server = fake_servers.next().await.unwrap();
6729
6730 let response = project.update(cx, |project, cx| {
6731 project.prepare_rename(buffer.clone(), 7, cx)
6732 });
6733 fake_server
6734 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6735 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6736 assert_eq!(params.position, lsp::Position::new(0, 7));
6737 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6738 lsp::Position::new(0, 6),
6739 lsp::Position::new(0, 9),
6740 )))
6741 })
6742 .next()
6743 .await
6744 .unwrap();
6745 let range = response.await.unwrap().unwrap();
6746 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6747 assert_eq!(range, 6..9);
6748
6749 let response = project.update(cx, |project, cx| {
6750 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6751 });
6752 fake_server
6753 .handle_request::<lsp::request::Rename, _>(|params, _| {
6754 assert_eq!(
6755 params.text_document_position.text_document.uri.as_str(),
6756 "file:///dir/one.rs"
6757 );
6758 assert_eq!(
6759 params.text_document_position.position,
6760 lsp::Position::new(0, 7)
6761 );
6762 assert_eq!(params.new_name, "THREE");
6763 Some(lsp::WorkspaceEdit {
6764 changes: Some(
6765 [
6766 (
6767 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6768 vec![lsp::TextEdit::new(
6769 lsp::Range::new(
6770 lsp::Position::new(0, 6),
6771 lsp::Position::new(0, 9),
6772 ),
6773 "THREE".to_string(),
6774 )],
6775 ),
6776 (
6777 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6778 vec![
6779 lsp::TextEdit::new(
6780 lsp::Range::new(
6781 lsp::Position::new(0, 24),
6782 lsp::Position::new(0, 27),
6783 ),
6784 "THREE".to_string(),
6785 ),
6786 lsp::TextEdit::new(
6787 lsp::Range::new(
6788 lsp::Position::new(0, 35),
6789 lsp::Position::new(0, 38),
6790 ),
6791 "THREE".to_string(),
6792 ),
6793 ],
6794 ),
6795 ]
6796 .into_iter()
6797 .collect(),
6798 ),
6799 ..Default::default()
6800 })
6801 })
6802 .next()
6803 .await
6804 .unwrap();
6805 let mut transaction = response.await.unwrap().0;
6806 assert_eq!(transaction.len(), 2);
6807 assert_eq!(
6808 transaction
6809 .remove_entry(&buffer)
6810 .unwrap()
6811 .0
6812 .read_with(cx, |buffer, _| buffer.text()),
6813 "const THREE: usize = 1;"
6814 );
6815 assert_eq!(
6816 transaction
6817 .into_keys()
6818 .next()
6819 .unwrap()
6820 .read_with(cx, |buffer, _| buffer.text()),
6821 "const TWO: usize = one::THREE + one::THREE;"
6822 );
6823 }
6824
6825 #[gpui::test]
6826 async fn test_search(cx: &mut gpui::TestAppContext) {
6827 let fs = FakeFs::new(cx.background());
6828 fs.insert_tree(
6829 "/dir",
6830 json!({
6831 "one.rs": "const ONE: usize = 1;",
6832 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6833 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6834 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6835 }),
6836 )
6837 .await;
6838 let project = Project::test(fs.clone(), cx);
6839 let (tree, _) = project
6840 .update(cx, |project, cx| {
6841 project.find_or_create_local_worktree("/dir", true, cx)
6842 })
6843 .await
6844 .unwrap();
6845 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6846 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6847 .await;
6848
6849 assert_eq!(
6850 search(&project, SearchQuery::text("TWO", false, true), cx)
6851 .await
6852 .unwrap(),
6853 HashMap::from_iter([
6854 ("two.rs".to_string(), vec![6..9]),
6855 ("three.rs".to_string(), vec![37..40])
6856 ])
6857 );
6858
6859 let buffer_4 = project
6860 .update(cx, |project, cx| {
6861 project.open_buffer((worktree_id, "four.rs"), cx)
6862 })
6863 .await
6864 .unwrap();
6865 buffer_4.update(cx, |buffer, cx| {
6866 buffer.edit([20..28, 31..43], "two::TWO", cx);
6867 });
6868
6869 assert_eq!(
6870 search(&project, SearchQuery::text("TWO", false, true), cx)
6871 .await
6872 .unwrap(),
6873 HashMap::from_iter([
6874 ("two.rs".to_string(), vec![6..9]),
6875 ("three.rs".to_string(), vec![37..40]),
6876 ("four.rs".to_string(), vec![25..28, 36..39])
6877 ])
6878 );
6879
6880 async fn search(
6881 project: &ModelHandle<Project>,
6882 query: SearchQuery,
6883 cx: &mut gpui::TestAppContext,
6884 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6885 let results = project
6886 .update(cx, |project, cx| project.search(query, cx))
6887 .await?;
6888
6889 Ok(results
6890 .into_iter()
6891 .map(|(buffer, ranges)| {
6892 buffer.read_with(cx, |buffer, _| {
6893 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6894 let ranges = ranges
6895 .into_iter()
6896 .map(|range| range.to_offset(buffer))
6897 .collect::<Vec<_>>();
6898 (path, ranges)
6899 })
6900 })
6901 .collect())
6902 }
6903 }
6904}