1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
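/// Central state for a single project: the worktrees being edited, the buffers
/// that are currently open, the language servers running for each
/// (worktree, language) pair, and the collaborators connected to the project.
/// A `Project` is either local (hosted by this client) or remote (joined as a
/// guest); see `ProjectClientState` below.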
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
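/// How an open buffer is retained by the project:
/// - `Strong` keeps the buffer alive (used while the project is shared or remote),
/// - `Weak` lets the buffer drop once no one else holds it,
/// - `Loading` collects operations that arrive before the buffer finishes loading,
///   so they can be applied once it is registered.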
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
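/// Whether this project is hosted locally or joined as a guest. A local project
/// tracks the remote id assigned by the server once it is registered, while a
/// remote project records its replica id and whether the host has stopped sharing.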
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128}
129
130enum LanguageServerEvent {
131 WorkStart {
132 token: String,
133 },
134 WorkProgress {
135 token: String,
136 progress: LanguageServerProgress,
137 },
138 WorkEnd {
139 token: String,
140 },
141 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
142}
143
144pub struct LanguageServerStatus {
145 pub name: String,
146 pub pending_work: BTreeMap<String, LanguageServerProgress>,
147 pending_diagnostic_updates: isize,
148}
149
150#[derive(Clone, Debug)]
151pub struct LanguageServerProgress {
152 pub message: Option<String>,
153 pub percentage: Option<usize>,
154 pub last_update_at: Instant,
155}
156
157#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
158pub struct ProjectPath {
159 pub worktree_id: WorktreeId,
160 pub path: Arc<Path>,
161}
162
163#[derive(Clone, Debug, Default, PartialEq)]
164pub struct DiagnosticSummary {
165 pub error_count: usize,
166 pub warning_count: usize,
167 pub info_count: usize,
168 pub hint_count: usize,
169}
170
171#[derive(Debug)]
172pub struct Location {
173 pub buffer: ModelHandle<Buffer>,
174 pub range: Range<language::Anchor>,
175}
176
177#[derive(Debug)]
178pub struct DocumentHighlight {
179 pub range: Range<language::Anchor>,
180 pub kind: DocumentHighlightKind,
181}
182
183#[derive(Clone, Debug)]
184pub struct Symbol {
185 pub source_worktree_id: WorktreeId,
186 pub worktree_id: WorktreeId,
187 pub language_name: String,
188 pub path: PathBuf,
189 pub label: CodeLabel,
190 pub name: String,
191 pub kind: lsp::SymbolKind,
192 pub range: Range<PointUtf16>,
193 pub signature: [u8; 32],
194}
195
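/// The per-buffer transactions produced by a project-wide operation such as
/// formatting or applying a code action, keyed by the buffer they apply to.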
196#[derive(Default)]
197pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
198
199impl DiagnosticSummary {
200 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
201 let mut this = Self {
202 error_count: 0,
203 warning_count: 0,
204 info_count: 0,
205 hint_count: 0,
206 };
207
208 for entry in diagnostics {
209 if entry.diagnostic.is_primary {
210 match entry.diagnostic.severity {
211 DiagnosticSeverity::ERROR => this.error_count += 1,
212 DiagnosticSeverity::WARNING => this.warning_count += 1,
213 DiagnosticSeverity::INFORMATION => this.info_count += 1,
214 DiagnosticSeverity::HINT => this.hint_count += 1,
215 _ => {}
216 }
217 }
218 }
219
220 this
221 }
222
223 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
224 proto::DiagnosticSummary {
225 path: path.to_string_lossy().to_string(),
226 error_count: self.error_count as u32,
227 warning_count: self.warning_count as u32,
228 info_count: self.info_count as u32,
229 hint_count: self.hint_count as u32,
230 }
231 }
232}
233
234#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
235pub struct ProjectEntryId(usize);
236
237impl ProjectEntryId {
238 pub fn new(counter: &AtomicUsize) -> Self {
239 Self(counter.fetch_add(1, SeqCst))
240 }
241
242 pub fn from_proto(id: u64) -> Self {
243 Self(id as usize)
244 }
245
246 pub fn to_proto(&self) -> u64 {
247 self.0 as u64
248 }
249
250 pub fn to_usize(&self) -> usize {
251 self.0
252 }
253}
254
255impl Project {
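    /// Registers the RPC message and request handlers that let remote peers
    /// drive this project (buffer updates, worktree sync, LSP commands, etc.).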
256 pub fn init(client: &Arc<Client>) {
257 client.add_model_message_handler(Self::handle_add_collaborator);
258 client.add_model_message_handler(Self::handle_buffer_reloaded);
259 client.add_model_message_handler(Self::handle_buffer_saved);
260 client.add_model_message_handler(Self::handle_start_language_server);
261 client.add_model_message_handler(Self::handle_update_language_server);
262 client.add_model_message_handler(Self::handle_remove_collaborator);
263 client.add_model_message_handler(Self::handle_register_worktree);
264 client.add_model_message_handler(Self::handle_unregister_worktree);
265 client.add_model_message_handler(Self::handle_unshare_project);
266 client.add_model_message_handler(Self::handle_update_buffer_file);
267 client.add_model_message_handler(Self::handle_update_buffer);
268 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
269 client.add_model_message_handler(Self::handle_update_worktree);
270 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
271 client.add_model_request_handler(Self::handle_apply_code_action);
272 client.add_model_request_handler(Self::handle_format_buffers);
273 client.add_model_request_handler(Self::handle_get_code_actions);
274 client.add_model_request_handler(Self::handle_get_completions);
275 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
278 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
280 client.add_model_request_handler(Self::handle_search_project);
281 client.add_model_request_handler(Self::handle_get_project_symbols);
282 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
283 client.add_model_request_handler(Self::handle_open_buffer_by_id);
284 client.add_model_request_handler(Self::handle_open_buffer_by_path);
285 client.add_model_request_handler(Self::handle_save_buffer);
286 }
287
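    /// Creates a project hosted by this client. A background task watches the
    /// connection status and, while connected, registers the project (and its
    /// worktrees) with the server so that it can later be shared.
    ///
    /// Illustrative sketch (not compiled here); assumes `client`, `user_store`,
    /// `languages`, and `fs` have already been constructed, as in `Project::test`:
    ///
    /// ```ignore
    /// let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
    /// ```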
288 pub fn local(
289 client: Arc<Client>,
290 user_store: ModelHandle<UserStore>,
291 languages: Arc<LanguageRegistry>,
292 fs: Arc<dyn Fs>,
293 cx: &mut MutableAppContext,
294 ) -> ModelHandle<Self> {
295 cx.add_model(|cx: &mut ModelContext<Self>| {
296 let (remote_id_tx, remote_id_rx) = watch::channel();
297 let _maintain_remote_id_task = cx.spawn_weak({
298 let rpc = client.clone();
299 move |this, mut cx| {
300 async move {
301 let mut status = rpc.status();
302 while let Some(status) = status.next().await {
303 if let Some(this) = this.upgrade(&cx) {
304 let remote_id = if status.is_connected() {
305 let response = rpc.request(proto::RegisterProject {}).await?;
306 Some(response.project_id)
307 } else {
308 None
309 };
310
311 if let Some(project_id) = remote_id {
312 let mut registrations = Vec::new();
313 this.update(&mut cx, |this, cx| {
314 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
315 registrations.push(worktree.update(
316 cx,
317 |worktree, cx| {
318 let worktree = worktree.as_local_mut().unwrap();
319 worktree.register(project_id, cx)
320 },
321 ));
322 }
323 });
324 for registration in registrations {
325 registration.await?;
326 }
327 }
328 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
329 }
330 }
331 Ok(())
332 }
333 .log_err()
334 }
335 });
336
337 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
338 Self {
339 worktrees: Default::default(),
340 collaborators: Default::default(),
341 opened_buffers: Default::default(),
342 shared_buffers: Default::default(),
343 loading_buffers: Default::default(),
344 loading_local_worktrees: Default::default(),
345 buffer_snapshots: Default::default(),
346 client_state: ProjectClientState::Local {
347 is_shared: false,
348 remote_id_tx,
349 remote_id_rx,
350 _maintain_remote_id_task,
351 },
352 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
353 subscriptions: Vec::new(),
354 active_entry: None,
355 languages,
356 client,
357 user_store,
358 fs,
359 next_entry_id: Default::default(),
360 language_servers_with_diagnostics_running: 0,
361 language_servers: Default::default(),
362 started_language_servers: Default::default(),
363 language_server_statuses: Default::default(),
364 language_server_settings: Default::default(),
365 next_language_server_id: 0,
366 nonce: StdRng::from_entropy().gen(),
367 }
368 })
369 }
370
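    /// Joins a project hosted by another client: authenticates, requests
    /// `JoinProject`, builds remote worktrees from the response, and loads the
    /// current collaborators before returning the model handle.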
371 pub async fn remote(
372 remote_id: u64,
373 client: Arc<Client>,
374 user_store: ModelHandle<UserStore>,
375 languages: Arc<LanguageRegistry>,
376 fs: Arc<dyn Fs>,
377 cx: &mut AsyncAppContext,
378 ) -> Result<ModelHandle<Self>> {
379 client.authenticate_and_connect(&cx).await?;
380
381 let response = client
382 .request(proto::JoinProject {
383 project_id: remote_id,
384 })
385 .await?;
386
387 let replica_id = response.replica_id as ReplicaId;
388
389 let mut worktrees = Vec::new();
390 for worktree in response.worktrees {
391 let (worktree, load_task) = cx
392 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
393 worktrees.push(worktree);
394 load_task.detach();
395 }
396
397 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
398 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
399 let mut this = Self {
400 worktrees: Vec::new(),
401 loading_buffers: Default::default(),
402 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
403 shared_buffers: Default::default(),
404 loading_local_worktrees: Default::default(),
405 active_entry: None,
406 collaborators: Default::default(),
407 languages,
408 user_store: user_store.clone(),
409 fs,
410 next_entry_id: Default::default(),
411 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
412 client: client.clone(),
413 client_state: ProjectClientState::Remote {
414 sharing_has_stopped: false,
415 remote_id,
416 replica_id,
417 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
418 async move {
419 let mut status = client.status();
420 let is_connected =
421 status.next().await.map_or(false, |s| s.is_connected());
422 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
423 if !is_connected || status.next().await.is_some() {
424 if let Some(this) = this.upgrade(&cx) {
425 this.update(&mut cx, |this, cx| this.project_unshared(cx))
426 }
427 }
428 Ok(())
429 }
430 .log_err()
431 }),
432 },
433 language_servers_with_diagnostics_running: 0,
434 language_servers: Default::default(),
435 started_language_servers: Default::default(),
436 language_server_settings: Default::default(),
437 language_server_statuses: response
438 .language_servers
439 .into_iter()
440 .map(|server| {
441 (
442 server.id as usize,
443 LanguageServerStatus {
444 name: server.name,
445 pending_work: Default::default(),
446 pending_diagnostic_updates: 0,
447 },
448 )
449 })
450 .collect(),
451 next_language_server_id: 0,
452 opened_buffers: Default::default(),
453 buffer_snapshots: Default::default(),
454 nonce: StdRng::from_entropy().gen(),
455 };
456 for worktree in worktrees {
457 this.add_worktree(&worktree, cx);
458 }
459 this
460 });
461
462 let user_ids = response
463 .collaborators
464 .iter()
465 .map(|peer| peer.user_id)
466 .collect();
467 user_store
468 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
469 .await?;
470 let mut collaborators = HashMap::default();
471 for message in response.collaborators {
472 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
473 collaborators.insert(collaborator.peer_id, collaborator);
474 }
475
476 this.update(cx, |this, _| {
477 this.collaborators = collaborators;
478 });
479
480 Ok(this)
481 }
482
483 #[cfg(any(test, feature = "test-support"))]
484 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
485 let languages = Arc::new(LanguageRegistry::test());
486 let http_client = client::test::FakeHttpClient::with_404_response();
487 let client = client::Client::new(http_client.clone());
488 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
489 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
490 }
491
492 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
493 self.opened_buffers
494 .get(&remote_id)
495 .and_then(|buffer| buffer.upgrade(cx))
496 }
497
498 #[cfg(any(test, feature = "test-support"))]
499 pub fn languages(&self) -> &Arc<LanguageRegistry> {
500 &self.languages
501 }
502
503 #[cfg(any(test, feature = "test-support"))]
504 pub fn check_invariants(&self, cx: &AppContext) {
505 if self.is_local() {
506 let mut worktree_root_paths = HashMap::default();
507 for worktree in self.worktrees(cx) {
508 let worktree = worktree.read(cx);
509 let abs_path = worktree.as_local().unwrap().abs_path().clone();
510 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
511 assert_eq!(
512 prev_worktree_id,
513 None,
514 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
515 abs_path,
516 worktree.id(),
517 prev_worktree_id
518 )
519 }
520 } else {
521 let replica_id = self.replica_id();
522 for buffer in self.opened_buffers.values() {
523 if let Some(buffer) = buffer.upgrade(cx) {
524 let buffer = buffer.read(cx);
525 assert_eq!(
526 buffer.deferred_ops_len(),
527 0,
528 "replica {}, buffer {} has deferred operations",
529 replica_id,
530 buffer.remote_id()
531 );
532 }
533 }
534 }
535 }
536
537 #[cfg(any(test, feature = "test-support"))]
538 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
539 let path = path.into();
540 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
541 self.opened_buffers.iter().any(|(_, buffer)| {
542 if let Some(buffer) = buffer.upgrade(cx) {
543 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
544 if file.worktree == worktree && file.path() == &path.path {
545 return true;
546 }
547 }
548 }
549 false
550 })
551 } else {
552 false
553 }
554 }
555
556 pub fn fs(&self) -> &Arc<dyn Fs> {
557 &self.fs
558 }
559
560 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
561 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
562 *remote_id_tx.borrow_mut() = remote_id;
563 }
564
565 self.subscriptions.clear();
566 if let Some(remote_id) = remote_id {
567 self.subscriptions
568 .push(self.client.add_model_for_remote_entity(remote_id, cx));
569 }
570 cx.emit(Event::RemoteIdChanged(remote_id))
571 }
572
573 pub fn remote_id(&self) -> Option<u64> {
574 match &self.client_state {
575 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
576 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
577 }
578 }
579
580 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
581 let mut id = None;
582 let mut watch = None;
583 match &self.client_state {
584 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
585 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
586 }
587
588 async move {
589 if let Some(id) = id {
590 return id;
591 }
592 let mut watch = watch.unwrap();
593 loop {
594 let id = *watch.borrow();
595 if let Some(id) = id {
596 return id;
597 }
598 watch.next().await;
599 }
600 }
601 }
602
603 pub fn replica_id(&self) -> ReplicaId {
604 match &self.client_state {
605 ProjectClientState::Local { .. } => 0,
606 ProjectClientState::Remote { replica_id, .. } => *replica_id,
607 }
608 }
609
610 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
611 &self.collaborators
612 }
613
614 pub fn worktrees<'a>(
615 &'a self,
616 cx: &'a AppContext,
617 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
618 self.worktrees
619 .iter()
620 .filter_map(move |worktree| worktree.upgrade(cx))
621 }
622
623 pub fn visible_worktrees<'a>(
624 &'a self,
625 cx: &'a AppContext,
626 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
627 self.worktrees.iter().filter_map(|worktree| {
628 worktree.upgrade(cx).and_then(|worktree| {
629 if worktree.read(cx).is_visible() {
630 Some(worktree)
631 } else {
632 None
633 }
634 })
635 })
636 }
637
638 pub fn worktree_for_id(
639 &self,
640 id: WorktreeId,
641 cx: &AppContext,
642 ) -> Option<ModelHandle<Worktree>> {
643 self.worktrees(cx)
644 .find(|worktree| worktree.read(cx).id() == id)
645 }
646
647 pub fn worktree_for_entry(
648 &self,
649 entry_id: ProjectEntryId,
650 cx: &AppContext,
651 ) -> Option<ModelHandle<Worktree>> {
652 self.worktrees(cx)
653 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
654 }
655
656 pub fn worktree_id_for_entry(
657 &self,
658 entry_id: ProjectEntryId,
659 cx: &AppContext,
660 ) -> Option<WorktreeId> {
661 self.worktree_for_entry(entry_id, cx)
662 .map(|worktree| worktree.read(cx).id())
663 }
664
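    /// Starts sharing this local project with collaborators. Open buffers and
    /// worktrees are upgraded to strong handles so they stay alive for guests,
    /// the server is told via `ShareProject`, and each local worktree is shared.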
665 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
666 let rpc = self.client.clone();
667 cx.spawn(|this, mut cx| async move {
668 let project_id = this.update(&mut cx, |this, cx| {
669 if let ProjectClientState::Local {
670 is_shared,
671 remote_id_rx,
672 ..
673 } = &mut this.client_state
674 {
675 *is_shared = true;
676
677 for open_buffer in this.opened_buffers.values_mut() {
678 match open_buffer {
679 OpenBuffer::Strong(_) => {}
680 OpenBuffer::Weak(buffer) => {
681 if let Some(buffer) = buffer.upgrade(cx) {
682 *open_buffer = OpenBuffer::Strong(buffer);
683 }
684 }
685 OpenBuffer::Loading(_) => unreachable!(),
686 }
687 }
688
689 for worktree_handle in this.worktrees.iter_mut() {
690 match worktree_handle {
691 WorktreeHandle::Strong(_) => {}
692 WorktreeHandle::Weak(worktree) => {
693 if let Some(worktree) = worktree.upgrade(cx) {
694 *worktree_handle = WorktreeHandle::Strong(worktree);
695 }
696 }
697 }
698 }
699
700 remote_id_rx
701 .borrow()
702 .ok_or_else(|| anyhow!("no project id"))
703 } else {
704 Err(anyhow!("can't share a remote project"))
705 }
706 })?;
707
708 rpc.request(proto::ShareProject { project_id }).await?;
709
710 let mut tasks = Vec::new();
711 this.update(&mut cx, |this, cx| {
712 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
713 worktree.update(cx, |worktree, cx| {
714 let worktree = worktree.as_local_mut().unwrap();
715 tasks.push(worktree.share(project_id, cx));
716 });
717 }
718 });
719 for task in tasks {
720 task.await?;
721 }
722 this.update(&mut cx, |_, cx| cx.notify());
723 Ok(())
724 })
725 }
726
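    /// Stops sharing this local project: buffers and non-visible worktrees are
    /// downgraded to weak handles, collaborators and shared buffers are cleared,
    /// and the server is notified via `UnshareProject`.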
727 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
728 let rpc = self.client.clone();
729 cx.spawn(|this, mut cx| async move {
730 let project_id = this.update(&mut cx, |this, cx| {
731 if let ProjectClientState::Local {
732 is_shared,
733 remote_id_rx,
734 ..
735 } = &mut this.client_state
736 {
737 *is_shared = false;
738
739 for open_buffer in this.opened_buffers.values_mut() {
740 match open_buffer {
741 OpenBuffer::Strong(buffer) => {
742 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
743 }
744 _ => {}
745 }
746 }
747
748 for worktree_handle in this.worktrees.iter_mut() {
749 match worktree_handle {
750 WorktreeHandle::Strong(worktree) => {
751 if !worktree.read(cx).is_visible() {
752 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
753 }
754 }
755 _ => {}
756 }
757 }
758
759 remote_id_rx
760 .borrow()
761 .ok_or_else(|| anyhow!("no project id"))
762 } else {
763 Err(anyhow!("can't unshare a remote project"))
764 }
765 })?;
766
767 rpc.send(proto::UnshareProject { project_id })?;
768 this.update(&mut cx, |this, cx| {
769 this.collaborators.clear();
770 this.shared_buffers.clear();
771 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
772 worktree.update(cx, |worktree, _| {
773 worktree.as_local_mut().unwrap().unshare();
774 });
775 }
776 cx.notify()
777 });
778 Ok(())
779 })
780 }
781
782 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
783 if let ProjectClientState::Remote {
784 sharing_has_stopped,
785 ..
786 } = &mut self.client_state
787 {
788 *sharing_has_stopped = true;
789 self.collaborators.clear();
790 cx.notify();
791 }
792 }
793
794 pub fn is_read_only(&self) -> bool {
795 match &self.client_state {
796 ProjectClientState::Local { .. } => false,
797 ProjectClientState::Remote {
798 sharing_has_stopped,
799 ..
800 } => *sharing_has_stopped,
801 }
802 }
803
804 pub fn is_local(&self) -> bool {
805 match &self.client_state {
806 ProjectClientState::Local { .. } => true,
807 ProjectClientState::Remote { .. } => false,
808 }
809 }
810
811 pub fn is_remote(&self) -> bool {
812 !self.is_local()
813 }
814
815 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
816 if self.is_remote() {
817 return Err(anyhow!("creating buffers as a guest is not supported yet"));
818 }
819
820 let buffer = cx.add_model(|cx| {
821 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
822 });
823 self.register_buffer(&buffer, cx)?;
824 Ok(buffer)
825 }
826
827 pub fn open_path(
828 &mut self,
829 path: impl Into<ProjectPath>,
830 cx: &mut ModelContext<Self>,
831 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
832 let task = self.open_buffer(path, cx);
833 cx.spawn_weak(|_, cx| async move {
834 let buffer = task.await?;
835 let project_entry_id = buffer
836 .read_with(&cx, |buffer, cx| {
837 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
838 })
839 .ok_or_else(|| anyhow!("no project entry"))?;
840 Ok((project_entry_id, buffer.into()))
841 })
842 }
843
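    /// Opens the buffer for the given project path, reusing an already-open
    /// buffer when possible and deduplicating concurrent loads of the same path
    /// via the `loading_buffers` map. Local worktrees load the file from disk;
    /// remote worktrees request the buffer from the host over RPC.
    ///
    /// Illustrative usage sketch (not compiled here); assumes a known
    /// `worktree_id` and a `ModelHandle<Project>` named `project`:
    ///
    /// ```ignore
    /// let open = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/main.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// let buffer = open.await?;
    /// ```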
844 pub fn open_buffer(
845 &mut self,
846 path: impl Into<ProjectPath>,
847 cx: &mut ModelContext<Self>,
848 ) -> Task<Result<ModelHandle<Buffer>>> {
849 let project_path = path.into();
850 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
851 worktree
852 } else {
853 return Task::ready(Err(anyhow!("no such worktree")));
854 };
855
856 // If there is already a buffer for the given path, then return it.
857 let existing_buffer = self.get_open_buffer(&project_path, cx);
858 if let Some(existing_buffer) = existing_buffer {
859 return Task::ready(Ok(existing_buffer));
860 }
861
862 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
863 // If the given path is already being loaded, then wait for that existing
864 // task to complete and return the same buffer.
865 hash_map::Entry::Occupied(e) => e.get().clone(),
866
867 // Otherwise, record the fact that this path is now being loaded.
868 hash_map::Entry::Vacant(entry) => {
869 let (mut tx, rx) = postage::watch::channel();
870 entry.insert(rx.clone());
871
872 let load_buffer = if worktree.read(cx).is_local() {
873 self.open_local_buffer(&project_path.path, &worktree, cx)
874 } else {
875 self.open_remote_buffer(&project_path.path, &worktree, cx)
876 };
877
878 cx.spawn(move |this, mut cx| async move {
879 let load_result = load_buffer.await;
880 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
881 // Record the fact that the buffer is no longer loading.
882 this.loading_buffers.remove(&project_path);
883 let buffer = load_result.map_err(Arc::new)?;
884 Ok(buffer)
885 }));
886 })
887 .detach();
888 rx
889 }
890 };
891
892 cx.foreground().spawn(async move {
893 loop {
894 if let Some(result) = loading_watch.borrow().as_ref() {
895 match result {
896 Ok(buffer) => return Ok(buffer.clone()),
897 Err(error) => return Err(anyhow!("{}", error)),
898 }
899 }
900 loading_watch.next().await;
901 }
902 })
903 }
904
905 fn open_local_buffer(
906 &mut self,
907 path: &Arc<Path>,
908 worktree: &ModelHandle<Worktree>,
909 cx: &mut ModelContext<Self>,
910 ) -> Task<Result<ModelHandle<Buffer>>> {
911 let load_buffer = worktree.update(cx, |worktree, cx| {
912 let worktree = worktree.as_local_mut().unwrap();
913 worktree.load_buffer(path, cx)
914 });
915 cx.spawn(|this, mut cx| async move {
916 let buffer = load_buffer.await?;
917 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
918 Ok(buffer)
919 })
920 }
921
922 fn open_remote_buffer(
923 &mut self,
924 path: &Arc<Path>,
925 worktree: &ModelHandle<Worktree>,
926 cx: &mut ModelContext<Self>,
927 ) -> Task<Result<ModelHandle<Buffer>>> {
928 let rpc = self.client.clone();
929 let project_id = self.remote_id().unwrap();
930 let remote_worktree_id = worktree.read(cx).id();
931 let path = path.clone();
932 let path_string = path.to_string_lossy().to_string();
933 cx.spawn(|this, mut cx| async move {
934 let response = rpc
935 .request(proto::OpenBufferByPath {
936 project_id,
937 worktree_id: remote_worktree_id.to_proto(),
938 path: path_string,
939 })
940 .await?;
941 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
942 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
943 .await
944 })
945 }
946
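    /// Opens a buffer for a file referenced by a language server (e.g. a
    /// go-to-definition target) that may live outside the existing worktrees; in
    /// that case a new local worktree is created for it on the fly and the
    /// language server is registered for that worktree as well.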
947 fn open_local_buffer_via_lsp(
948 &mut self,
949 abs_path: lsp::Url,
950 lang_name: Arc<str>,
951 lang_server: Arc<LanguageServer>,
952 cx: &mut ModelContext<Self>,
953 ) -> Task<Result<ModelHandle<Buffer>>> {
954 cx.spawn(|this, mut cx| async move {
955 let abs_path = abs_path
956 .to_file_path()
957 .map_err(|_| anyhow!("can't convert URI to path"))?;
958 let (worktree, relative_path) = if let Some(result) =
959 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
960 {
961 result
962 } else {
963 let worktree = this
964 .update(&mut cx, |this, cx| {
965 this.create_local_worktree(&abs_path, false, cx)
966 })
967 .await?;
968 this.update(&mut cx, |this, cx| {
969 this.language_servers
970 .insert((worktree.read(cx).id(), lang_name), lang_server);
971 });
972 (worktree, PathBuf::new())
973 };
974
975 let project_path = ProjectPath {
976 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
977 path: relative_path.into(),
978 };
979 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
980 .await
981 })
982 }
983
984 pub fn open_buffer_by_id(
985 &mut self,
986 id: u64,
987 cx: &mut ModelContext<Self>,
988 ) -> Task<Result<ModelHandle<Buffer>>> {
989 if let Some(buffer) = self.buffer_for_id(id, cx) {
990 Task::ready(Ok(buffer))
991 } else if self.is_local() {
992 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
993 } else if let Some(project_id) = self.remote_id() {
994 let request = self
995 .client
996 .request(proto::OpenBufferById { project_id, id });
997 cx.spawn(|this, mut cx| async move {
998 let buffer = request
999 .await?
1000 .buffer
1001 .ok_or_else(|| anyhow!("invalid buffer"))?;
1002 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1003 .await
1004 })
1005 } else {
1006 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1007 }
1008 }
1009
1010 pub fn save_buffer_as(
1011 &mut self,
1012 buffer: ModelHandle<Buffer>,
1013 abs_path: PathBuf,
1014 cx: &mut ModelContext<Project>,
1015 ) -> Task<Result<()>> {
1016 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1017 cx.spawn(|this, mut cx| async move {
1018 let (worktree, path) = worktree_task.await?;
1019 worktree
1020 .update(&mut cx, |worktree, cx| {
1021 worktree
1022 .as_local_mut()
1023 .unwrap()
1024 .save_buffer_as(buffer.clone(), path, cx)
1025 })
1026 .await?;
1027 this.update(&mut cx, |this, cx| {
1028 this.assign_language_to_buffer(&buffer, cx);
1029 this.register_buffer_with_language_server(&buffer, cx);
1030 });
1031 Ok(())
1032 })
1033 }
1034
1035 pub fn get_open_buffer(
1036 &mut self,
1037 path: &ProjectPath,
1038 cx: &mut ModelContext<Self>,
1039 ) -> Option<ModelHandle<Buffer>> {
1040 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1041 self.opened_buffers.values().find_map(|buffer| {
1042 let buffer = buffer.upgrade(cx)?;
1043 let file = File::from_dyn(buffer.read(cx).file())?;
1044 if file.worktree == worktree && file.path() == &path.path {
1045 Some(buffer)
1046 } else {
1047 None
1048 }
1049 })
1050 }
1051
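    /// Records a newly opened buffer in `opened_buffers` (strongly if the
    /// project is shared or remote), applies any operations that were queued
    /// while it was loading, subscribes to its events, and assigns a language
    /// and language server to it.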
1052 fn register_buffer(
1053 &mut self,
1054 buffer: &ModelHandle<Buffer>,
1055 cx: &mut ModelContext<Self>,
1056 ) -> Result<()> {
1057 let remote_id = buffer.read(cx).remote_id();
1058 let open_buffer = if self.is_remote() || self.is_shared() {
1059 OpenBuffer::Strong(buffer.clone())
1060 } else {
1061 OpenBuffer::Weak(buffer.downgrade())
1062 };
1063
1064 match self.opened_buffers.insert(remote_id, open_buffer) {
1065 None => {}
1066 Some(OpenBuffer::Loading(operations)) => {
1067 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1068 }
1069 Some(OpenBuffer::Weak(existing_handle)) => {
1070 if existing_handle.upgrade(cx).is_some() {
1071 Err(anyhow!(
1072 "already registered buffer with remote id {}",
1073 remote_id
1074 ))?
1075 }
1076 }
1077 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1078 "already registered buffer with remote id {}",
1079 remote_id
1080 ))?,
1081 }
1082 cx.subscribe(buffer, |this, buffer, event, cx| {
1083 this.on_buffer_event(buffer, event, cx);
1084 })
1085 .detach();
1086
1087 self.assign_language_to_buffer(buffer, cx);
1088 self.register_buffer_with_language_server(buffer, cx);
1089
1090 Ok(())
1091 }
1092
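    /// For a local buffer with a language server, sends `textDocument/didOpen`,
    /// seeds the buffer's completion trigger characters from the server's
    /// capabilities, records the initial snapshot used for incremental
    /// `didChange` updates, applies any diagnostics the worktree already has for
    /// the file, and arranges to send `didClose` when the buffer is released.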
1093 fn register_buffer_with_language_server(
1094 &mut self,
1095 buffer_handle: &ModelHandle<Buffer>,
1096 cx: &mut ModelContext<Self>,
1097 ) {
1098 let buffer = buffer_handle.read(cx);
1099 let buffer_id = buffer.remote_id();
1100 if let Some(file) = File::from_dyn(buffer.file()) {
1101 if file.is_local() {
1102 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1103 let initial_snapshot = buffer.text_snapshot();
1104 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1105
1106 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1107 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1108 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1109 .log_err();
1110 }
1111 }
1112
1113 if let Some(server) = language_server {
1114 server
1115 .notify::<lsp::notification::DidOpenTextDocument>(
1116 lsp::DidOpenTextDocumentParams {
1117 text_document: lsp::TextDocumentItem::new(
1118 uri,
1119 Default::default(),
1120 0,
1121 initial_snapshot.text(),
1122 ),
1123 }
1124 .clone(),
1125 )
1126 .log_err();
1127 buffer_handle.update(cx, |buffer, cx| {
1128 buffer.set_completion_triggers(
1129 server
1130 .capabilities()
1131 .completion_provider
1132 .as_ref()
1133 .and_then(|provider| provider.trigger_characters.clone())
1134 .unwrap_or(Vec::new()),
1135 cx,
1136 )
1137 });
1138 self.buffer_snapshots
1139 .insert(buffer_id, vec![(0, initial_snapshot)]);
1140 }
1141
1142 cx.observe_release(buffer_handle, |this, buffer, cx| {
1143 if let Some(file) = File::from_dyn(buffer.file()) {
1144 if file.is_local() {
1145 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1146 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1147 server
1148 .notify::<lsp::notification::DidCloseTextDocument>(
1149 lsp::DidCloseTextDocumentParams {
1150 text_document: lsp::TextDocumentIdentifier::new(
1151 uri.clone(),
1152 ),
1153 },
1154 )
1155 .log_err();
1156 }
1157 }
1158 }
1159 })
1160 .detach();
1161 }
1162 }
1163 }
1164
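    /// Reacts to buffer events: forwards edit operations to the server when the
    /// project has a remote id, sends incremental `textDocument/didChange`
    /// notifications to the buffer's language server, and sends `didSave`
    /// notifications to every language server in the buffer's worktree.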
1165 fn on_buffer_event(
1166 &mut self,
1167 buffer: ModelHandle<Buffer>,
1168 event: &BufferEvent,
1169 cx: &mut ModelContext<Self>,
1170 ) -> Option<()> {
1171 match event {
1172 BufferEvent::Operation(operation) => {
1173 let project_id = self.remote_id()?;
1174 let request = self.client.request(proto::UpdateBuffer {
1175 project_id,
1176 buffer_id: buffer.read(cx).remote_id(),
1177 operations: vec![language::proto::serialize_operation(&operation)],
1178 });
1179 cx.background().spawn(request).detach_and_log_err(cx);
1180 }
1181 BufferEvent::Edited { .. } => {
1182 let language_server = self
1183 .language_server_for_buffer(buffer.read(cx), cx)?
1184 .clone();
1185 let buffer = buffer.read(cx);
1186 let file = File::from_dyn(buffer.file())?;
1187 let abs_path = file.as_local()?.abs_path(cx);
1188 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1189 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1190 let (version, prev_snapshot) = buffer_snapshots.last()?;
1191 let next_snapshot = buffer.text_snapshot();
1192 let next_version = version + 1;
1193
1194 let content_changes = buffer
1195 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1196 .map(|edit| {
1197 let edit_start = edit.new.start.0;
1198 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1199 let new_text = next_snapshot
1200 .text_for_range(edit.new.start.1..edit.new.end.1)
1201 .collect();
1202 lsp::TextDocumentContentChangeEvent {
1203 range: Some(lsp::Range::new(
1204 edit_start.to_lsp_position(),
1205 edit_end.to_lsp_position(),
1206 )),
1207 range_length: None,
1208 text: new_text,
1209 }
1210 })
1211 .collect();
1212
1213 buffer_snapshots.push((next_version, next_snapshot));
1214
1215 language_server
1216 .notify::<lsp::notification::DidChangeTextDocument>(
1217 lsp::DidChangeTextDocumentParams {
1218 text_document: lsp::VersionedTextDocumentIdentifier::new(
1219 uri,
1220 next_version,
1221 ),
1222 content_changes,
1223 },
1224 )
1225 .log_err();
1226 }
1227 BufferEvent::Saved => {
1228 let file = File::from_dyn(buffer.read(cx).file())?;
1229 let worktree_id = file.worktree_id(cx);
1230 let abs_path = file.as_local()?.abs_path(cx);
1231 let text_document = lsp::TextDocumentIdentifier {
1232 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1233 };
1234
1235 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1236 server
1237 .notify::<lsp::notification::DidSaveTextDocument>(
1238 lsp::DidSaveTextDocumentParams {
1239 text_document: text_document.clone(),
1240 text: None,
1241 },
1242 )
1243 .log_err();
1244 }
1245 }
1246 _ => {}
1247 }
1248
1249 None
1250 }
1251
1252 fn language_servers_for_worktree(
1253 &self,
1254 worktree_id: WorktreeId,
1255 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1256 self.language_servers.iter().filter_map(
1257 move |((language_server_worktree_id, language_name), server)| {
1258 if *language_server_worktree_id == worktree_id {
1259 Some((language_name.as_ref(), server))
1260 } else {
1261 None
1262 }
1263 },
1264 )
1265 }
1266
1267 fn assign_language_to_buffer(
1268 &mut self,
1269 buffer: &ModelHandle<Buffer>,
1270 cx: &mut ModelContext<Self>,
1271 ) -> Option<()> {
1272 // If the buffer has a language, set it and start the language server if we haven't already.
1273 let full_path = buffer.read(cx).file()?.full_path(cx);
1274 let language = self.languages.select_language(&full_path)?;
1275 buffer.update(cx, |buffer, cx| {
1276 buffer.set_language(Some(language.clone()), cx);
1277 });
1278
1279 let file = File::from_dyn(buffer.read(cx).file())?;
1280 let worktree = file.worktree.read(cx).as_local()?;
1281 let worktree_id = worktree.id();
1282 let worktree_abs_path = worktree.abs_path().clone();
1283 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1284
1285 None
1286 }
1287
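    /// Starts (at most once per `(worktree, language)` pair) the language server
    /// for the given language, wiring its diagnostics and progress notifications
    /// into a channel processed by `on_lsp_event`, then initializes the server
    /// and tells it about every matching buffer that is already open.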
1288 fn start_language_server(
1289 &mut self,
1290 worktree_id: WorktreeId,
1291 worktree_path: Arc<Path>,
1292 language: Arc<Language>,
1293 cx: &mut ModelContext<Self>,
1294 ) {
1295 let key = (worktree_id, language.name());
1296 self.started_language_servers
1297 .entry(key.clone())
1298 .or_insert_with(|| {
1299 let server_id = post_inc(&mut self.next_language_server_id);
1300 let language_server = self.languages.start_language_server(
1301 language.clone(),
1302 worktree_path,
1303 self.client.http_client(),
1304 cx,
1305 );
1306 cx.spawn_weak(|this, mut cx| async move {
1307 let mut language_server = language_server?.await.log_err()?;
1308 let this = this.upgrade(&cx)?;
1309 let (language_server_events_tx, language_server_events_rx) =
1310 smol::channel::unbounded();
1311
1312 language_server
1313 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1314 let language_server_events_tx = language_server_events_tx.clone();
1315 move |params| {
1316 language_server_events_tx
1317 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1318 .ok();
1319 }
1320 })
1321 .detach();
1322
1323 language_server
1324 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1325 let settings = this
1326 .read_with(&cx, |this, _| this.language_server_settings.clone());
1327 move |params| {
1328 let settings = settings.lock();
1329 Ok(params
1330 .items
1331 .into_iter()
1332 .map(|item| {
1333 if let Some(section) = &item.section {
1334 settings
1335 .get(section)
1336 .cloned()
1337 .unwrap_or(serde_json::Value::Null)
1338 } else {
1339 settings.clone()
1340 }
1341 })
1342 .collect())
1343 }
1344 })
1345 .detach();
1346
1347 language_server
1348 .on_notification::<lsp::notification::Progress, _>(move |params| {
1349 let token = match params.token {
1350 lsp::NumberOrString::String(token) => token,
1351 lsp::NumberOrString::Number(token) => {
1352 log::info!("skipping numeric progress token {}", token);
1353 return;
1354 }
1355 };
1356
1357 match params.value {
1358 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1359 lsp::WorkDoneProgress::Begin(_) => {
1360 language_server_events_tx
1361 .try_send(LanguageServerEvent::WorkStart { token })
1362 .ok();
1363 }
1364 lsp::WorkDoneProgress::Report(report) => {
1365 language_server_events_tx
1366 .try_send(LanguageServerEvent::WorkProgress {
1367 token,
1368 progress: LanguageServerProgress {
1369 message: report.message,
1370 percentage: report
1371 .percentage
1372 .map(|p| p as usize),
1373 last_update_at: Instant::now(),
1374 },
1375 })
1376 .ok();
1377 }
1378 lsp::WorkDoneProgress::End(_) => {
1379 language_server_events_tx
1380 .try_send(LanguageServerEvent::WorkEnd { token })
1381 .ok();
1382 }
1383 },
1384 }
1385 })
1386 .detach();
1387
1388 // Process all the LSP events.
1389 cx.spawn(|mut cx| {
1390 let this = this.downgrade();
1391 async move {
1392 while let Ok(event) = language_server_events_rx.recv().await {
1393 let this = this.upgrade(&cx)?;
1394 this.update(&mut cx, |this, cx| {
1395 this.on_lsp_event(server_id, event, &language, cx)
1396 });
1397
1398 // Don't starve the main thread when lots of events arrive all at once.
1399 smol::future::yield_now().await;
1400 }
1401 Some(())
1402 }
1403 })
1404 .detach();
1405
1406 let language_server = language_server.initialize().await.log_err()?;
1407 this.update(&mut cx, |this, cx| {
1408 this.language_servers
1409 .insert(key.clone(), language_server.clone());
1410 this.language_server_statuses.insert(
1411 server_id,
1412 LanguageServerStatus {
1413 name: language_server.name().to_string(),
1414 pending_work: Default::default(),
1415 pending_diagnostic_updates: 0,
1416 },
1417 );
1418 language_server
1419 .notify::<lsp::notification::DidChangeConfiguration>(
1420 lsp::DidChangeConfigurationParams {
1421 settings: this.language_server_settings.lock().clone(),
1422 },
1423 )
1424 .ok();
1425
1426 if let Some(project_id) = this.remote_id() {
1427 this.client
1428 .send(proto::StartLanguageServer {
1429 project_id,
1430 server: Some(proto::LanguageServer {
1431 id: server_id as u64,
1432 name: language_server.name().to_string(),
1433 }),
1434 })
1435 .log_err();
1436 }
1437
1438 // Tell the language server about every open buffer in the worktree that matches the language.
1439 for buffer in this.opened_buffers.values() {
1440 if let Some(buffer_handle) = buffer.upgrade(cx) {
1441 let buffer = buffer_handle.read(cx);
1442 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1443 file
1444 } else {
1445 continue;
1446 };
1447 let language = if let Some(language) = buffer.language() {
1448 language
1449 } else {
1450 continue;
1451 };
1452 if (file.worktree.read(cx).id(), language.name()) != key {
1453 continue;
1454 }
1455
1456 let file = file.as_local()?;
1457 let versions = this
1458 .buffer_snapshots
1459 .entry(buffer.remote_id())
1460 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1461 let (version, initial_snapshot) = versions.last().unwrap();
1462 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1463 language_server
1464 .notify::<lsp::notification::DidOpenTextDocument>(
1465 lsp::DidOpenTextDocumentParams {
1466 text_document: lsp::TextDocumentItem::new(
1467 uri,
1468 Default::default(),
1469 *version,
1470 initial_snapshot.text(),
1471 ),
1472 },
1473 )
1474 .log_err()?;
1475 buffer_handle.update(cx, |buffer, cx| {
1476 buffer.set_completion_triggers(
1477 language_server
1478 .capabilities()
1479 .completion_provider
1480 .as_ref()
1481 .and_then(|provider| {
1482 provider.trigger_characters.clone()
1483 })
1484 .unwrap_or(Vec::new()),
1485 cx,
1486 )
1487 });
1488 }
1489 }
1490
1491 cx.notify();
1492 Some(())
1493 });
1494
1495 Some(language_server)
1496 })
1497 });
1498 }
1499
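    /// Handles an event from a language server. Progress tokens matching the
    /// language's disk-based-diagnostics token are translated into
    /// started/finished disk-based-diagnostics events; other progress is tracked
    /// per server, and diagnostic updates are applied to the project. Each event
    /// is also forwarded to the server when the project has a remote id.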
1500 fn on_lsp_event(
1501 &mut self,
1502 language_server_id: usize,
1503 event: LanguageServerEvent,
1504 language: &Arc<Language>,
1505 cx: &mut ModelContext<Self>,
1506 ) {
1507 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1508 let language_server_status =
1509 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1510 status
1511 } else {
1512 return;
1513 };
1514
1515 match event {
1516 LanguageServerEvent::WorkStart { token } => {
1517 if Some(&token) == disk_diagnostics_token {
1518 language_server_status.pending_diagnostic_updates += 1;
1519 if language_server_status.pending_diagnostic_updates == 1 {
1520 self.disk_based_diagnostics_started(cx);
1521 self.broadcast_language_server_update(
1522 language_server_id,
1523 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1524 proto::LspDiskBasedDiagnosticsUpdating {},
1525 ),
1526 );
1527 }
1528 } else {
1529 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1530 self.broadcast_language_server_update(
1531 language_server_id,
1532 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1533 token,
1534 }),
1535 );
1536 }
1537 }
1538 LanguageServerEvent::WorkProgress { token, progress } => {
1539 if Some(&token) != disk_diagnostics_token {
1540 self.on_lsp_work_progress(
1541 language_server_id,
1542 token.clone(),
1543 progress.clone(),
1544 cx,
1545 );
1546 self.broadcast_language_server_update(
1547 language_server_id,
1548 proto::update_language_server::Variant::WorkProgress(
1549 proto::LspWorkProgress {
1550 token,
1551 message: progress.message,
1552 percentage: progress.percentage.map(|p| p as u32),
1553 },
1554 ),
1555 );
1556 }
1557 }
1558 LanguageServerEvent::WorkEnd { token } => {
1559 if Some(&token) == disk_diagnostics_token {
1560 language_server_status.pending_diagnostic_updates -= 1;
1561 if language_server_status.pending_diagnostic_updates == 0 {
1562 self.disk_based_diagnostics_finished(cx);
1563 self.broadcast_language_server_update(
1564 language_server_id,
1565 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1566 proto::LspDiskBasedDiagnosticsUpdated {},
1567 ),
1568 );
1569 }
1570 } else {
1571 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1572 self.broadcast_language_server_update(
1573 language_server_id,
1574 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1575 token,
1576 }),
1577 );
1578 }
1579 }
1580 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1581 language.process_diagnostics(&mut params);
1582
1583 if disk_diagnostics_token.is_none() {
1584 self.disk_based_diagnostics_started(cx);
1585 self.broadcast_language_server_update(
1586 language_server_id,
1587 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1588 proto::LspDiskBasedDiagnosticsUpdating {},
1589 ),
1590 );
1591 }
1592 self.update_diagnostics(
1593 params,
1594 language
1595 .disk_based_diagnostic_sources()
1596 .unwrap_or(&Default::default()),
1597 cx,
1598 )
1599 .log_err();
1600 if disk_diagnostics_token.is_none() {
1601 self.disk_based_diagnostics_finished(cx);
1602 self.broadcast_language_server_update(
1603 language_server_id,
1604 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1605 proto::LspDiskBasedDiagnosticsUpdated {},
1606 ),
1607 );
1608 }
1609 }
1610 }
1611 }
1612
1613 fn on_lsp_work_start(
1614 &mut self,
1615 language_server_id: usize,
1616 token: String,
1617 cx: &mut ModelContext<Self>,
1618 ) {
1619 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1620 status.pending_work.insert(
1621 token,
1622 LanguageServerProgress {
1623 message: None,
1624 percentage: None,
1625 last_update_at: Instant::now(),
1626 },
1627 );
1628 cx.notify();
1629 }
1630 }
1631
1632 fn on_lsp_work_progress(
1633 &mut self,
1634 language_server_id: usize,
1635 token: String,
1636 progress: LanguageServerProgress,
1637 cx: &mut ModelContext<Self>,
1638 ) {
1639 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1640 status.pending_work.insert(token, progress);
1641 cx.notify();
1642 }
1643 }
1644
1645 fn on_lsp_work_end(
1646 &mut self,
1647 language_server_id: usize,
1648 token: String,
1649 cx: &mut ModelContext<Self>,
1650 ) {
1651 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1652 status.pending_work.remove(&token);
1653 cx.notify();
1654 }
1655 }
1656
1657 fn broadcast_language_server_update(
1658 &self,
1659 language_server_id: usize,
1660 event: proto::update_language_server::Variant,
1661 ) {
1662 if let Some(project_id) = self.remote_id() {
1663 self.client
1664 .send(proto::UpdateLanguageServer {
1665 project_id,
1666 language_server_id: language_server_id as u64,
1667 variant: Some(event),
1668 })
1669 .log_err();
1670 }
1671 }
1672
1673 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1674 for server in self.language_servers.values() {
1675 server
1676 .notify::<lsp::notification::DidChangeConfiguration>(
1677 lsp::DidChangeConfigurationParams {
1678 settings: settings.clone(),
1679 },
1680 )
1681 .ok();
1682 }
1683 *self.language_server_settings.lock() = settings;
1684 }
1685
1686 pub fn language_server_statuses(
1687 &self,
1688 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1689 self.language_server_statuses.values()
1690 }
1691
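    /// Converts an LSP `publishDiagnostics` notification into grouped diagnostic
    /// entries: each primary diagnostic starts a new group, its related
    /// information is added to the group as non-primary entries, and diagnostics
    /// that merely point back at a primary are folded into those entries as
    /// severity / "unnecessary" hints. The result is stored on the worktree and
    /// on any matching open buffer.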
1692 pub fn update_diagnostics(
1693 &mut self,
1694 params: lsp::PublishDiagnosticsParams,
1695 disk_based_sources: &HashSet<String>,
1696 cx: &mut ModelContext<Self>,
1697 ) -> Result<()> {
1698 let abs_path = params
1699 .uri
1700 .to_file_path()
1701 .map_err(|_| anyhow!("URI is not a file"))?;
1702 let mut next_group_id = 0;
1703 let mut diagnostics = Vec::default();
1704 let mut primary_diagnostic_group_ids = HashMap::default();
1705 let mut sources_by_group_id = HashMap::default();
1706 let mut supporting_diagnostics = HashMap::default();
1707 for diagnostic in &params.diagnostics {
1708 let source = diagnostic.source.as_ref();
1709 let code = diagnostic.code.as_ref().map(|code| match code {
1710 lsp::NumberOrString::Number(code) => code.to_string(),
1711 lsp::NumberOrString::String(code) => code.clone(),
1712 });
1713 let range = range_from_lsp(diagnostic.range);
1714 let is_supporting = diagnostic
1715 .related_information
1716 .as_ref()
1717 .map_or(false, |infos| {
1718 infos.iter().any(|info| {
1719 primary_diagnostic_group_ids.contains_key(&(
1720 source,
1721 code.clone(),
1722 range_from_lsp(info.location.range),
1723 ))
1724 })
1725 });
1726
1727 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1728 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1729 });
1730
1731 if is_supporting {
1732 supporting_diagnostics.insert(
1733 (source, code.clone(), range),
1734 (diagnostic.severity, is_unnecessary),
1735 );
1736 } else {
1737 let group_id = post_inc(&mut next_group_id);
1738 let is_disk_based =
1739 source.map_or(false, |source| disk_based_sources.contains(source));
1740
1741 sources_by_group_id.insert(group_id, source);
1742 primary_diagnostic_group_ids
1743 .insert((source, code.clone(), range.clone()), group_id);
1744
1745 diagnostics.push(DiagnosticEntry {
1746 range,
1747 diagnostic: Diagnostic {
1748 code: code.clone(),
1749 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1750 message: diagnostic.message.clone(),
1751 group_id,
1752 is_primary: true,
1753 is_valid: true,
1754 is_disk_based,
1755 is_unnecessary,
1756 },
1757 });
1758 if let Some(infos) = &diagnostic.related_information {
1759 for info in infos {
1760 if info.location.uri == params.uri && !info.message.is_empty() {
1761 let range = range_from_lsp(info.location.range);
1762 diagnostics.push(DiagnosticEntry {
1763 range,
1764 diagnostic: Diagnostic {
1765 code: code.clone(),
1766 severity: DiagnosticSeverity::INFORMATION,
1767 message: info.message.clone(),
1768 group_id,
1769 is_primary: false,
1770 is_valid: true,
1771 is_disk_based,
1772 is_unnecessary: false,
1773 },
1774 });
1775 }
1776 }
1777 }
1778 }
1779 }
1780
1781 for entry in &mut diagnostics {
1782 let diagnostic = &mut entry.diagnostic;
1783 if !diagnostic.is_primary {
1784 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1785 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1786 source,
1787 diagnostic.code.clone(),
1788 entry.range.clone(),
1789 )) {
1790 if let Some(severity) = severity {
1791 diagnostic.severity = severity;
1792 }
1793 diagnostic.is_unnecessary = is_unnecessary;
1794 }
1795 }
1796 }
1797
1798 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1799 Ok(())
1800 }
1801
1802 pub fn update_diagnostic_entries(
1803 &mut self,
1804 abs_path: PathBuf,
1805 version: Option<i32>,
1806 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1807 cx: &mut ModelContext<Project>,
1808 ) -> Result<(), anyhow::Error> {
1809 let (worktree, relative_path) = self
1810 .find_local_worktree(&abs_path, cx)
1811 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1812 if !worktree.read(cx).is_visible() {
1813 return Ok(());
1814 }
1815
1816 let project_path = ProjectPath {
1817 worktree_id: worktree.read(cx).id(),
1818 path: relative_path.into(),
1819 };
1820
1821 for buffer in self.opened_buffers.values() {
1822 if let Some(buffer) = buffer.upgrade(cx) {
1823 if buffer
1824 .read(cx)
1825 .file()
1826 .map_or(false, |file| *file.path() == project_path.path)
1827 {
1828 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1829 break;
1830 }
1831 }
1832 }
1833 worktree.update(cx, |worktree, cx| {
1834 worktree
1835 .as_local_mut()
1836 .ok_or_else(|| anyhow!("not a local worktree"))?
1837 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1838 })?;
1839 cx.emit(Event::DiagnosticsUpdated(project_path));
1840 Ok(())
1841 }
1842
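    /// Applies a set of diagnostics to an open buffer. Diagnostics are sorted,
    /// disk-based diagnostics have their ranges shifted to account for unsaved
    /// edits since the last save, all ranges are clipped to the buffer, and
    /// empty ranges are widened by one character.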
1843 fn update_buffer_diagnostics(
1844 &mut self,
1845 buffer: &ModelHandle<Buffer>,
1846 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1847 version: Option<i32>,
1848 cx: &mut ModelContext<Self>,
1849 ) -> Result<()> {
1850 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1851 Ordering::Equal
1852 .then_with(|| b.is_primary.cmp(&a.is_primary))
1853 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1854 .then_with(|| a.severity.cmp(&b.severity))
1855 .then_with(|| a.message.cmp(&b.message))
1856 }
1857
1858 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1859
1860 diagnostics.sort_unstable_by(|a, b| {
1861 Ordering::Equal
1862 .then_with(|| a.range.start.cmp(&b.range.start))
1863 .then_with(|| b.range.end.cmp(&a.range.end))
1864 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1865 });
1866
1867 let mut sanitized_diagnostics = Vec::new();
1868 let mut edits_since_save = snapshot
1869 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1870 .peekable();
1871 let mut last_edit_old_end = PointUtf16::zero();
1872 let mut last_edit_new_end = PointUtf16::zero();
1873 'outer: for entry in diagnostics {
1874 let mut start = entry.range.start;
1875 let mut end = entry.range.end;
1876
1877 // Some diagnostics are based on files on disk instead of buffers'
1878 // current contents. Adjust these diagnostics' ranges to reflect
1879 // any unsaved edits.
1880 if entry.diagnostic.is_disk_based {
1881 while let Some(edit) = edits_since_save.peek() {
1882 if edit.old.end <= start {
1883 last_edit_old_end = edit.old.end;
1884 last_edit_new_end = edit.new.end;
1885 edits_since_save.next();
1886 } else if edit.old.start <= end && edit.old.end >= start {
1887 continue 'outer;
1888 } else {
1889 break;
1890 }
1891 }
1892
1893 let start_overshoot = start - last_edit_old_end;
1894 start = last_edit_new_end;
1895 start += start_overshoot;
1896
1897 let end_overshoot = end - last_edit_old_end;
1898 end = last_edit_new_end;
1899 end += end_overshoot;
1900 }
1901
1902 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1903 ..snapshot.clip_point_utf16(end, Bias::Right);
1904
1905 // Expand empty ranges by one character
1906 if range.start == range.end {
1907 range.end.column += 1;
1908 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1909 if range.start == range.end && range.end.column > 0 {
1910 range.start.column -= 1;
1911 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1912 }
1913 }
1914
1915 sanitized_diagnostics.push(DiagnosticEntry {
1916 range,
1917 diagnostic: entry.diagnostic,
1918 });
1919 }
1920 drop(edits_since_save);
1921
1922 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1923 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1924 Ok(())
1925 }
1926
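    /// Formats the given buffers, returning a `ProjectTransaction` that
    /// records the edits applied to each one.
    ///
    /// Remote buffers are formatted by sending a `FormatBuffers` request to
    /// the project's host, while local buffers use their language server's
    /// document (or range) formatting support. When `push_to_history` is
    /// false, the resulting transactions are not left in the buffers' undo
    /// histories.
    ///
    /// A minimal, illustrative call site (not compiled; assumes a `project`
    /// handle, an open `buffer`, and a gpui context `cx` are already in
    /// scope):
    ///
    /// ```ignore
    /// let mut buffers = HashSet::default();
    /// buffers.insert(buffer.clone());
    /// let format_task = project.update(cx, |project, cx| {
    ///     project.format(buffers, false, cx)
    /// });
    /// let project_transaction = format_task.await?;
    /// ```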
1927 pub fn format(
1928 &self,
1929 buffers: HashSet<ModelHandle<Buffer>>,
1930 push_to_history: bool,
1931 cx: &mut ModelContext<Project>,
1932 ) -> Task<Result<ProjectTransaction>> {
1933 let mut local_buffers = Vec::new();
1934 let mut remote_buffers = None;
1935 for buffer_handle in buffers {
1936 let buffer = buffer_handle.read(cx);
1937 if let Some(file) = File::from_dyn(buffer.file()) {
1938 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1939 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1940 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1941 }
1942 } else {
1943 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1944 }
1945 } else {
1946 return Task::ready(Ok(Default::default()));
1947 }
1948 }
1949
1950 let remote_buffers = self.remote_id().zip(remote_buffers);
1951 let client = self.client.clone();
1952
1953 cx.spawn(|this, mut cx| async move {
1954 let mut project_transaction = ProjectTransaction::default();
1955
1956 if let Some((project_id, remote_buffers)) = remote_buffers {
1957 let response = client
1958 .request(proto::FormatBuffers {
1959 project_id,
1960 buffer_ids: remote_buffers
1961 .iter()
1962 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1963 .collect(),
1964 })
1965 .await?
1966 .transaction
1967 .ok_or_else(|| anyhow!("missing transaction"))?;
1968 project_transaction = this
1969 .update(&mut cx, |this, cx| {
1970 this.deserialize_project_transaction(response, push_to_history, cx)
1971 })
1972 .await?;
1973 }
1974
1975 for (buffer, buffer_abs_path, language_server) in local_buffers {
1976 let text_document = lsp::TextDocumentIdentifier::new(
1977 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1978 );
1979 let capabilities = &language_server.capabilities();
1980 let lsp_edits = if capabilities
1981 .document_formatting_provider
1982 .as_ref()
1983 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1984 {
1985 language_server
1986 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1987 text_document,
1988 options: Default::default(),
1989 work_done_progress_params: Default::default(),
1990 })
1991 .await?
1992 } else if capabilities
1993 .document_range_formatting_provider
1994 .as_ref()
1995 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1996 {
1997 let buffer_start = lsp::Position::new(0, 0);
1998 let buffer_end = buffer
1999 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
2000 .to_lsp_position();
2001 language_server
2002 .request::<lsp::request::RangeFormatting>(
2003 lsp::DocumentRangeFormattingParams {
2004 text_document,
2005 range: lsp::Range::new(buffer_start, buffer_end),
2006 options: Default::default(),
2007 work_done_progress_params: Default::default(),
2008 },
2009 )
2010 .await?
2011 } else {
2012 continue;
2013 };
2014
2015 if let Some(lsp_edits) = lsp_edits {
2016 let edits = this
2017 .update(&mut cx, |this, cx| {
2018 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2019 })
2020 .await?;
2021 buffer.update(&mut cx, |buffer, cx| {
2022 buffer.finalize_last_transaction();
2023 buffer.start_transaction();
2024 for (range, text) in edits {
2025 buffer.edit([range], text, cx);
2026 }
2027 if buffer.end_transaction(cx).is_some() {
2028 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2029 if !push_to_history {
2030 buffer.forget_transaction(transaction.id);
2031 }
2032 project_transaction.0.insert(cx.handle(), transaction);
2033 }
2034 });
2035 }
2036 }
2037
2038 Ok(project_transaction)
2039 })
2040 }
2041
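    /// Returns the definition(s) of the symbol at `position` in the given
    /// buffer, routed through `request_lsp` so it works for both local and
    /// remote projects.
    ///
    /// A minimal, illustrative call site (not compiled; assumes `project`,
    /// `buffer`, a `position` implementing `ToPointUtf16`, and a gpui
    /// context `cx` are in scope):
    ///
    /// ```ignore
    /// let definition_task = project.update(cx, |project, cx| {
    ///     project.definition(&buffer, position, cx)
    /// });
    /// let locations = definition_task.await?;
    /// ```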
2042 pub fn definition<T: ToPointUtf16>(
2043 &self,
2044 buffer: &ModelHandle<Buffer>,
2045 position: T,
2046 cx: &mut ModelContext<Self>,
2047 ) -> Task<Result<Vec<Location>>> {
2048 let position = position.to_point_utf16(buffer.read(cx));
2049 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2050 }
2051
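    /// Returns all references to the symbol at `position`, using the same
    /// local/remote dispatch as `definition`.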
2052 pub fn references<T: ToPointUtf16>(
2053 &self,
2054 buffer: &ModelHandle<Buffer>,
2055 position: T,
2056 cx: &mut ModelContext<Self>,
2057 ) -> Task<Result<Vec<Location>>> {
2058 let position = position.to_point_utf16(buffer.read(cx));
2059 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2060 }
2061
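    /// Returns the document highlights for the symbol at `position`.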
2062 pub fn document_highlights<T: ToPointUtf16>(
2063 &self,
2064 buffer: &ModelHandle<Buffer>,
2065 position: T,
2066 cx: &mut ModelContext<Self>,
2067 ) -> Task<Result<Vec<DocumentHighlight>>> {
2068 let position = position.to_point_utf16(buffer.read(cx));
2069
2070 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2071 }
2072
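    /// Queries workspace symbols matching `query`.
    ///
    /// Locally, a `workspace/symbol` request is sent to every running
    /// language server and the results are resolved back to project paths;
    /// symbols that fall outside any local worktree are relativized against
    /// the originating server's worktree root. Remotely, the query is
    /// forwarded to the host via `GetProjectSymbols`.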
2073 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2074 if self.is_local() {
2075 let mut language_servers = HashMap::default();
2076 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2077 if let Some((worktree, language)) = self
2078 .worktree_for_id(*worktree_id, cx)
2079 .and_then(|worktree| worktree.read(cx).as_local())
2080 .zip(self.languages.get_language(language_name))
2081 {
2082 language_servers
2083 .entry(Arc::as_ptr(language_server))
2084 .or_insert((
2085 language_server.clone(),
2086 *worktree_id,
2087 worktree.abs_path().clone(),
2088 language.clone(),
2089 ));
2090 }
2091 }
2092
2093 let mut requests = Vec::new();
2094 for (language_server, _, _, _) in language_servers.values() {
2095 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2096 lsp::WorkspaceSymbolParams {
2097 query: query.to_string(),
2098 ..Default::default()
2099 },
2100 ));
2101 }
2102
2103 cx.spawn_weak(|this, cx| async move {
2104 let responses = futures::future::try_join_all(requests).await?;
2105
2106 let mut symbols = Vec::new();
2107 if let Some(this) = this.upgrade(&cx) {
2108 this.read_with(&cx, |this, cx| {
2109 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2110 language_servers.into_values().zip(responses)
2111 {
2112 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2113 |lsp_symbol| {
2114 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2115 let mut worktree_id = source_worktree_id;
2116 let path;
2117 if let Some((worktree, rel_path)) =
2118 this.find_local_worktree(&abs_path, cx)
2119 {
2120 worktree_id = worktree.read(cx).id();
2121 path = rel_path;
2122 } else {
2123 path = relativize_path(&worktree_abs_path, &abs_path);
2124 }
2125
2126 let label = language
2127 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2128 .unwrap_or_else(|| {
2129 CodeLabel::plain(lsp_symbol.name.clone(), None)
2130 });
2131 let signature = this.symbol_signature(worktree_id, &path);
2132
2133 Some(Symbol {
2134 source_worktree_id,
2135 worktree_id,
2136 language_name: language.name().to_string(),
2137 name: lsp_symbol.name,
2138 kind: lsp_symbol.kind,
2139 label,
2140 path,
2141 range: range_from_lsp(lsp_symbol.location.range),
2142 signature,
2143 })
2144 },
2145 ));
2146 }
2147 })
2148 }
2149
2150 Ok(symbols)
2151 })
2152 } else if let Some(project_id) = self.remote_id() {
2153 let request = self.client.request(proto::GetProjectSymbols {
2154 project_id,
2155 query: query.to_string(),
2156 });
2157 cx.spawn_weak(|this, cx| async move {
2158 let response = request.await?;
2159 let mut symbols = Vec::new();
2160 if let Some(this) = this.upgrade(&cx) {
2161 this.read_with(&cx, |this, _| {
2162 symbols.extend(
2163 response
2164 .symbols
2165 .into_iter()
2166 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2167 );
2168 })
2169 }
2170 Ok(symbols)
2171 })
2172 } else {
2173 Task::ready(Ok(Default::default()))
2174 }
2175 }
2176
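    /// Opens the buffer containing the given symbol.
    ///
    /// For local projects this looks up the language server that produced
    /// the symbol, reconstructs the symbol's absolute path from its
    /// worktree, and opens it via `open_local_buffer_via_lsp`. Remote
    /// projects forward the request to the host.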
2177 pub fn open_buffer_for_symbol(
2178 &mut self,
2179 symbol: &Symbol,
2180 cx: &mut ModelContext<Self>,
2181 ) -> Task<Result<ModelHandle<Buffer>>> {
2182 if self.is_local() {
2183 let language_server = if let Some(server) = self.language_servers.get(&(
2184 symbol.source_worktree_id,
2185 Arc::from(symbol.language_name.as_str()),
2186 )) {
2187 server.clone()
2188 } else {
2189 return Task::ready(Err(anyhow!(
2190 "language server for worktree and language not found"
2191 )));
2192 };
2193
2194 let worktree_abs_path = if let Some(worktree_abs_path) = self
2195 .worktree_for_id(symbol.worktree_id, cx)
2196 .and_then(|worktree| worktree.read(cx).as_local())
2197 .map(|local_worktree| local_worktree.abs_path())
2198 {
2199 worktree_abs_path
2200 } else {
2201 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2202 };
2203 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2204 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2205 uri
2206 } else {
2207 return Task::ready(Err(anyhow!("invalid symbol path")));
2208 };
2209
2210 self.open_local_buffer_via_lsp(
2211 symbol_uri,
2212 Arc::from(symbol.language_name.as_str()),
2213 language_server,
2214 cx,
2215 )
2216 } else if let Some(project_id) = self.remote_id() {
2217 let request = self.client.request(proto::OpenBufferForSymbol {
2218 project_id,
2219 symbol: Some(serialize_symbol(symbol)),
2220 });
2221 cx.spawn(|this, mut cx| async move {
2222 let response = request.await?;
2223 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2224 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2225 .await
2226 })
2227 } else {
2228 Task::ready(Err(anyhow!("project does not have a remote id")))
2229 }
2230 }
2231
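    /// Requests code completions at `position`.
    ///
    /// Local buffers issue an LSP `textDocument/completion` request;
    /// completions without a text edit, with insert-and-replace edits, or
    /// with edit ranges that don't correspond to valid buffer positions are
    /// dropped. Remote buffers forward the request to the host and wait for
    /// the buffer to catch up to the host's reported version before
    /// deserializing the results.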
2232 pub fn completions<T: ToPointUtf16>(
2233 &self,
2234 source_buffer_handle: &ModelHandle<Buffer>,
2235 position: T,
2236 cx: &mut ModelContext<Self>,
2237 ) -> Task<Result<Vec<Completion>>> {
2238 let source_buffer_handle = source_buffer_handle.clone();
2239 let source_buffer = source_buffer_handle.read(cx);
2240 let buffer_id = source_buffer.remote_id();
2241 let language = source_buffer.language().cloned();
2242 let worktree;
2243 let buffer_abs_path;
2244 if let Some(file) = File::from_dyn(source_buffer.file()) {
2245 worktree = file.worktree.clone();
2246 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2247 } else {
2248 return Task::ready(Ok(Default::default()));
2249 };
2250
2251 let position = position.to_point_utf16(source_buffer);
2252 let anchor = source_buffer.anchor_after(position);
2253
2254 if worktree.read(cx).as_local().is_some() {
2255 let buffer_abs_path = buffer_abs_path.unwrap();
2256 let lang_server =
2257 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2258 server.clone()
2259 } else {
2260 return Task::ready(Ok(Default::default()));
2261 };
2262
2263 cx.spawn(|_, cx| async move {
2264 let completions = lang_server
2265 .request::<lsp::request::Completion>(lsp::CompletionParams {
2266 text_document_position: lsp::TextDocumentPositionParams::new(
2267 lsp::TextDocumentIdentifier::new(
2268 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2269 ),
2270 position.to_lsp_position(),
2271 ),
2272 context: Default::default(),
2273 work_done_progress_params: Default::default(),
2274 partial_result_params: Default::default(),
2275 })
2276 .await
2277 .context("lsp completion request failed")?;
2278
2279 let completions = if let Some(completions) = completions {
2280 match completions {
2281 lsp::CompletionResponse::Array(completions) => completions,
2282 lsp::CompletionResponse::List(list) => list.items,
2283 }
2284 } else {
2285 Default::default()
2286 };
2287
2288 source_buffer_handle.read_with(&cx, |this, _| {
2289 Ok(completions
2290 .into_iter()
2291 .filter_map(|lsp_completion| {
2292 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2293 lsp::CompletionTextEdit::Edit(edit) => {
2294 (range_from_lsp(edit.range), edit.new_text.clone())
2295 }
2296 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2297 log::info!("unsupported insert/replace completion");
2298 return None;
2299 }
2300 };
2301
2302 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2303 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2304 if clipped_start == old_range.start && clipped_end == old_range.end {
2305 Some(Completion {
2306 old_range: this.anchor_before(old_range.start)
2307 ..this.anchor_after(old_range.end),
2308 new_text,
2309 label: language
2310 .as_ref()
2311 .and_then(|l| l.label_for_completion(&lsp_completion))
2312 .unwrap_or_else(|| {
2313 CodeLabel::plain(
2314 lsp_completion.label.clone(),
2315 lsp_completion.filter_text.as_deref(),
2316 )
2317 }),
2318 lsp_completion,
2319 })
2320 } else {
2321 None
2322 }
2323 })
2324 .collect())
2325 })
2326 })
2327 } else if let Some(project_id) = self.remote_id() {
2328 let rpc = self.client.clone();
2329 let message = proto::GetCompletions {
2330 project_id,
2331 buffer_id,
2332 position: Some(language::proto::serialize_anchor(&anchor)),
2333 version: serialize_version(&source_buffer.version()),
2334 };
2335 cx.spawn_weak(|_, mut cx| async move {
2336 let response = rpc.request(message).await?;
2337
2338 source_buffer_handle
2339 .update(&mut cx, |buffer, _| {
2340 buffer.wait_for_version(deserialize_version(response.version))
2341 })
2342 .await;
2343
2344 response
2345 .completions
2346 .into_iter()
2347 .map(|completion| {
2348 language::proto::deserialize_completion(completion, language.as_ref())
2349 })
2350 .collect()
2351 })
2352 } else {
2353 Task::ready(Ok(Default::default()))
2354 }
2355 }
2356
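    /// Resolves the given completion with the language server and applies
    /// any additional text edits it returns, yielding the resulting
    /// transaction (or `None` if nothing changed). When `push_to_history`
    /// is false, the transaction is removed from the buffer's undo history.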
2357 pub fn apply_additional_edits_for_completion(
2358 &self,
2359 buffer_handle: ModelHandle<Buffer>,
2360 completion: Completion,
2361 push_to_history: bool,
2362 cx: &mut ModelContext<Self>,
2363 ) -> Task<Result<Option<Transaction>>> {
2364 let buffer = buffer_handle.read(cx);
2365 let buffer_id = buffer.remote_id();
2366
2367 if self.is_local() {
2368 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2369 server.clone()
2370 } else {
2371 return Task::ready(Ok(Default::default()));
2372 };
2373
2374 cx.spawn(|this, mut cx| async move {
2375 let resolved_completion = lang_server
2376 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2377 .await?;
2378 if let Some(edits) = resolved_completion.additional_text_edits {
2379 let edits = this
2380 .update(&mut cx, |this, cx| {
2381 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2382 })
2383 .await?;
2384 buffer_handle.update(&mut cx, |buffer, cx| {
2385 buffer.finalize_last_transaction();
2386 buffer.start_transaction();
2387 for (range, text) in edits {
2388 buffer.edit([range], text, cx);
2389 }
2390 let transaction = if buffer.end_transaction(cx).is_some() {
2391 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2392 if !push_to_history {
2393 buffer.forget_transaction(transaction.id);
2394 }
2395 Some(transaction)
2396 } else {
2397 None
2398 };
2399 Ok(transaction)
2400 })
2401 } else {
2402 Ok(None)
2403 }
2404 })
2405 } else if let Some(project_id) = self.remote_id() {
2406 let client = self.client.clone();
2407 cx.spawn(|_, mut cx| async move {
2408 let response = client
2409 .request(proto::ApplyCompletionAdditionalEdits {
2410 project_id,
2411 buffer_id,
2412 completion: Some(language::proto::serialize_completion(&completion)),
2413 })
2414 .await?;
2415
2416 if let Some(transaction) = response.transaction {
2417 let transaction = language::proto::deserialize_transaction(transaction)?;
2418 buffer_handle
2419 .update(&mut cx, |buffer, _| {
2420 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2421 })
2422 .await;
2423 if push_to_history {
2424 buffer_handle.update(&mut cx, |buffer, _| {
2425 buffer.push_transaction(transaction.clone(), Instant::now());
2426 });
2427 }
2428 Ok(Some(transaction))
2429 } else {
2430 Ok(None)
2431 }
2432 })
2433 } else {
2434 Task::ready(Err(anyhow!("project does not have a remote id")))
2435 }
2436 }
2437
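    /// Requests the code actions available for the given range, limited to
    /// quickfix and refactor kinds. Returns an empty list if the language
    /// server doesn't advertise code action support.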
2438 pub fn code_actions<T: ToOffset>(
2439 &self,
2440 buffer_handle: &ModelHandle<Buffer>,
2441 range: Range<T>,
2442 cx: &mut ModelContext<Self>,
2443 ) -> Task<Result<Vec<CodeAction>>> {
2444 let buffer_handle = buffer_handle.clone();
2445 let buffer = buffer_handle.read(cx);
2446 let buffer_id = buffer.remote_id();
2447 let worktree;
2448 let buffer_abs_path;
2449 if let Some(file) = File::from_dyn(buffer.file()) {
2450 worktree = file.worktree.clone();
2451 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2452 } else {
2453 return Task::ready(Ok(Default::default()));
2454 };
2455 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2456
2457 if worktree.read(cx).as_local().is_some() {
2458 let buffer_abs_path = buffer_abs_path.unwrap();
2459 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2460 server.clone()
2461 } else {
2462 return Task::ready(Ok(Default::default()));
2463 };
2464
2465 let lsp_range = lsp::Range::new(
2466 range.start.to_point_utf16(buffer).to_lsp_position(),
2467 range.end.to_point_utf16(buffer).to_lsp_position(),
2468 );
2469 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
                    return Ok(Default::default());
                }
2473
2474 Ok(lang_server
2475 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2476 text_document: lsp::TextDocumentIdentifier::new(
2477 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2478 ),
2479 range: lsp_range,
2480 work_done_progress_params: Default::default(),
2481 partial_result_params: Default::default(),
2482 context: lsp::CodeActionContext {
2483 diagnostics: Default::default(),
2484 only: Some(vec![
2485 lsp::CodeActionKind::QUICKFIX,
2486 lsp::CodeActionKind::REFACTOR,
2487 lsp::CodeActionKind::REFACTOR_EXTRACT,
2488 ]),
2489 },
2490 })
2491 .await?
2492 .unwrap_or_default()
2493 .into_iter()
2494 .filter_map(|entry| {
2495 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2496 Some(CodeAction {
2497 range: range.clone(),
2498 lsp_action,
2499 })
2500 } else {
2501 None
2502 }
2503 })
2504 .collect())
2505 })
2506 } else if let Some(project_id) = self.remote_id() {
2507 let rpc = self.client.clone();
2508 let version = buffer.version();
2509 cx.spawn_weak(|_, mut cx| async move {
2510 let response = rpc
2511 .request(proto::GetCodeActions {
2512 project_id,
2513 buffer_id,
2514 start: Some(language::proto::serialize_anchor(&range.start)),
2515 end: Some(language::proto::serialize_anchor(&range.end)),
2516 version: serialize_version(&version),
2517 })
2518 .await?;
2519
2520 buffer_handle
2521 .update(&mut cx, |buffer, _| {
2522 buffer.wait_for_version(deserialize_version(response.version))
2523 })
2524 .await;
2525
2526 response
2527 .actions
2528 .into_iter()
2529 .map(language::proto::deserialize_code_action)
2530 .collect()
2531 })
2532 } else {
2533 Task::ready(Ok(Default::default()))
2534 }
2535 }
2536
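    /// Applies a code action previously returned by `code_actions`.
    ///
    /// Actions that support lazy resolution (they carry `data`) are resolved
    /// via `codeAction/resolve` after refreshing their range; otherwise the
    /// actions for the range are re-requested and matched by title. The
    /// resulting workspace edit is then applied across all affected buffers.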
2537 pub fn apply_code_action(
2538 &self,
2539 buffer_handle: ModelHandle<Buffer>,
2540 mut action: CodeAction,
2541 push_to_history: bool,
2542 cx: &mut ModelContext<Self>,
2543 ) -> Task<Result<ProjectTransaction>> {
2544 if self.is_local() {
2545 let buffer = buffer_handle.read(cx);
2546 let lang_name = if let Some(lang) = buffer.language() {
2547 lang.name()
2548 } else {
2549 return Task::ready(Ok(Default::default()));
2550 };
2551 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2552 server.clone()
2553 } else {
2554 return Task::ready(Ok(Default::default()));
2555 };
2556 let range = action.range.to_point_utf16(buffer);
2557
2558 cx.spawn(|this, mut cx| async move {
2559 if let Some(lsp_range) = action
2560 .lsp_action
2561 .data
2562 .as_mut()
2563 .and_then(|d| d.get_mut("codeActionParams"))
2564 .and_then(|d| d.get_mut("range"))
2565 {
2566 *lsp_range = serde_json::to_value(&lsp::Range::new(
2567 range.start.to_lsp_position(),
2568 range.end.to_lsp_position(),
2569 ))
2570 .unwrap();
2571 action.lsp_action = lang_server
2572 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2573 .await?;
2574 } else {
2575 let actions = this
2576 .update(&mut cx, |this, cx| {
2577 this.code_actions(&buffer_handle, action.range, cx)
2578 })
2579 .await?;
2580 action.lsp_action = actions
2581 .into_iter()
2582 .find(|a| a.lsp_action.title == action.lsp_action.title)
2583 .ok_or_else(|| anyhow!("code action is outdated"))?
2584 .lsp_action;
2585 }
2586
2587 if let Some(edit) = action.lsp_action.edit {
2588 Self::deserialize_workspace_edit(
2589 this,
2590 edit,
2591 push_to_history,
2592 lang_name,
2593 lang_server,
2594 &mut cx,
2595 )
2596 .await
2597 } else {
2598 Ok(ProjectTransaction::default())
2599 }
2600 })
2601 } else if let Some(project_id) = self.remote_id() {
2602 let client = self.client.clone();
2603 let request = proto::ApplyCodeAction {
2604 project_id,
2605 buffer_id: buffer_handle.read(cx).remote_id(),
2606 action: Some(language::proto::serialize_code_action(&action)),
2607 };
2608 cx.spawn(|this, mut cx| async move {
2609 let response = client
2610 .request(request)
2611 .await?
2612 .transaction
2613 .ok_or_else(|| anyhow!("missing transaction"))?;
2614 this.update(&mut cx, |this, cx| {
2615 this.deserialize_project_transaction(response, push_to_history, cx)
2616 })
2617 .await
2618 })
2619 } else {
2620 Task::ready(Err(anyhow!("project does not have a remote id")))
2621 }
2622 }
2623
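    /// Applies an LSP `WorkspaceEdit` to the project: resource operations
    /// (create, rename, delete) go through the `Fs`, and text edits are
    /// applied to the corresponding buffers, which are opened on demand via
    /// the language server that produced the edit.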
2624 async fn deserialize_workspace_edit(
2625 this: ModelHandle<Self>,
2626 edit: lsp::WorkspaceEdit,
2627 push_to_history: bool,
2628 language_name: Arc<str>,
2629 language_server: Arc<LanguageServer>,
2630 cx: &mut AsyncAppContext,
2631 ) -> Result<ProjectTransaction> {
2632 let fs = this.read_with(cx, |this, _| this.fs.clone());
2633 let mut operations = Vec::new();
2634 if let Some(document_changes) = edit.document_changes {
2635 match document_changes {
2636 lsp::DocumentChanges::Edits(edits) => {
2637 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2638 }
2639 lsp::DocumentChanges::Operations(ops) => operations = ops,
2640 }
2641 } else if let Some(changes) = edit.changes {
2642 operations.extend(changes.into_iter().map(|(uri, edits)| {
2643 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2644 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2645 uri,
2646 version: None,
2647 },
2648 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2649 })
2650 }));
2651 }
2652
2653 let mut project_transaction = ProjectTransaction::default();
2654 for operation in operations {
2655 match operation {
2656 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2657 let abs_path = op
2658 .uri
2659 .to_file_path()
2660 .map_err(|_| anyhow!("can't convert URI to path"))?;
2661
2662 if let Some(parent_path) = abs_path.parent() {
2663 fs.create_dir(parent_path).await?;
2664 }
2665 if abs_path.ends_with("/") {
2666 fs.create_dir(&abs_path).await?;
2667 } else {
2668 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2669 .await?;
2670 }
2671 }
2672 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2673 let source_abs_path = op
2674 .old_uri
2675 .to_file_path()
2676 .map_err(|_| anyhow!("can't convert URI to path"))?;
2677 let target_abs_path = op
2678 .new_uri
2679 .to_file_path()
2680 .map_err(|_| anyhow!("can't convert URI to path"))?;
2681 fs.rename(
2682 &source_abs_path,
2683 &target_abs_path,
2684 op.options.map(Into::into).unwrap_or_default(),
2685 )
2686 .await?;
2687 }
2688 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2689 let abs_path = op
2690 .uri
2691 .to_file_path()
2692 .map_err(|_| anyhow!("can't convert URI to path"))?;
2693 let options = op.options.map(Into::into).unwrap_or_default();
2694 if abs_path.ends_with("/") {
2695 fs.remove_dir(&abs_path, options).await?;
2696 } else {
2697 fs.remove_file(&abs_path, options).await?;
2698 }
2699 }
2700 lsp::DocumentChangeOperation::Edit(op) => {
2701 let buffer_to_edit = this
2702 .update(cx, |this, cx| {
2703 this.open_local_buffer_via_lsp(
2704 op.text_document.uri,
2705 language_name.clone(),
2706 language_server.clone(),
2707 cx,
2708 )
2709 })
2710 .await?;
2711
2712 let edits = this
2713 .update(cx, |this, cx| {
2714 let edits = op.edits.into_iter().map(|edit| match edit {
2715 lsp::OneOf::Left(edit) => edit,
2716 lsp::OneOf::Right(edit) => edit.text_edit,
2717 });
2718 this.edits_from_lsp(
2719 &buffer_to_edit,
2720 edits,
2721 op.text_document.version,
2722 cx,
2723 )
2724 })
2725 .await?;
2726
2727 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2728 buffer.finalize_last_transaction();
2729 buffer.start_transaction();
2730 for (range, text) in edits {
2731 buffer.edit([range], text, cx);
2732 }
2733 let transaction = if buffer.end_transaction(cx).is_some() {
2734 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2735 if !push_to_history {
2736 buffer.forget_transaction(transaction.id);
2737 }
2738 Some(transaction)
2739 } else {
2740 None
2741 };
2742
2743 transaction
2744 });
2745 if let Some(transaction) = transaction {
2746 project_transaction.0.insert(buffer_to_edit, transaction);
2747 }
2748 }
2749 }
2750 }
2751
2752 Ok(project_transaction)
2753 }
2754
2755 pub fn prepare_rename<T: ToPointUtf16>(
2756 &self,
2757 buffer: ModelHandle<Buffer>,
2758 position: T,
2759 cx: &mut ModelContext<Self>,
2760 ) -> Task<Result<Option<Range<Anchor>>>> {
2761 let position = position.to_point_utf16(buffer.read(cx));
2762 self.request_lsp(buffer, PrepareRename { position }, cx)
2763 }
2764
2765 pub fn perform_rename<T: ToPointUtf16>(
2766 &self,
2767 buffer: ModelHandle<Buffer>,
2768 position: T,
2769 new_name: String,
2770 push_to_history: bool,
2771 cx: &mut ModelContext<Self>,
2772 ) -> Task<Result<ProjectTransaction>> {
2773 let position = position.to_point_utf16(buffer.read(cx));
2774 self.request_lsp(
2775 buffer,
2776 PerformRename {
2777 position,
2778 new_name,
2779 push_to_history,
2780 },
2781 cx,
2782 )
2783 }
2784
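    /// Searches the project for `query`, returning the matching ranges
    /// grouped by buffer.
    ///
    /// Local projects run a three-stage pipeline: background workers scan
    /// the visible files of each worktree for candidate paths, candidate
    /// buffers are opened (or reused if already open), and a second pool of
    /// workers searches each buffer snapshot. Remote projects forward the
    /// query to the host.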
2785 pub fn search(
2786 &self,
2787 query: SearchQuery,
2788 cx: &mut ModelContext<Self>,
2789 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2790 if self.is_local() {
2791 let snapshots = self
2792 .visible_worktrees(cx)
2793 .filter_map(|tree| {
2794 let tree = tree.read(cx).as_local()?;
2795 Some(tree.snapshot())
2796 })
2797 .collect::<Vec<_>>();
2798
2799 let background = cx.background().clone();
2800 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2801 if path_count == 0 {
2802 return Task::ready(Ok(Default::default()));
2803 }
2804 let workers = background.num_cpus().min(path_count);
2805 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2806 cx.background()
2807 .spawn({
2808 let fs = self.fs.clone();
2809 let background = cx.background().clone();
2810 let query = query.clone();
2811 async move {
2812 let fs = &fs;
2813 let query = &query;
2814 let matching_paths_tx = &matching_paths_tx;
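                    // Split the visible paths evenly across workers (ceiling division).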
2815 let paths_per_worker = (path_count + workers - 1) / workers;
2816 let snapshots = &snapshots;
2817 background
2818 .scoped(|scope| {
2819 for worker_ix in 0..workers {
2820 let worker_start_ix = worker_ix * paths_per_worker;
2821 let worker_end_ix = worker_start_ix + paths_per_worker;
2822 scope.spawn(async move {
2823 let mut snapshot_start_ix = 0;
2824 let mut abs_path = PathBuf::new();
2825 for snapshot in snapshots {
2826 let snapshot_end_ix =
2827 snapshot_start_ix + snapshot.visible_file_count();
2828 if worker_end_ix <= snapshot_start_ix {
2829 break;
2830 } else if worker_start_ix > snapshot_end_ix {
2831 snapshot_start_ix = snapshot_end_ix;
2832 continue;
2833 } else {
2834 let start_in_snapshot = worker_start_ix
2835 .saturating_sub(snapshot_start_ix);
2836 let end_in_snapshot =
2837 cmp::min(worker_end_ix, snapshot_end_ix)
2838 - snapshot_start_ix;
2839
2840 for entry in snapshot
2841 .files(false, start_in_snapshot)
2842 .take(end_in_snapshot - start_in_snapshot)
2843 {
2844 if matching_paths_tx.is_closed() {
2845 break;
2846 }
2847
2848 abs_path.clear();
2849 abs_path.push(&snapshot.abs_path());
2850 abs_path.push(&entry.path);
2851 let matches = if let Some(file) =
2852 fs.open_sync(&abs_path).await.log_err()
2853 {
2854 query.detect(file).unwrap_or(false)
2855 } else {
2856 false
2857 };
2858
2859 if matches {
2860 let project_path =
2861 (snapshot.id(), entry.path.clone());
2862 if matching_paths_tx
2863 .send(project_path)
2864 .await
2865 .is_err()
2866 {
2867 break;
2868 }
2869 }
2870 }
2871
2872 snapshot_start_ix = snapshot_end_ix;
2873 }
2874 }
2875 });
2876 }
2877 })
2878 .await;
2879 }
2880 })
2881 .detach();
2882
2883 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2884 let open_buffers = self
2885 .opened_buffers
2886 .values()
2887 .filter_map(|b| b.upgrade(cx))
2888 .collect::<HashSet<_>>();
2889 cx.spawn(|this, cx| async move {
2890 for buffer in &open_buffers {
2891 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2892 buffers_tx.send((buffer.clone(), snapshot)).await?;
2893 }
2894
2895 let open_buffers = Rc::new(RefCell::new(open_buffers));
2896 while let Some(project_path) = matching_paths_rx.next().await {
2897 if buffers_tx.is_closed() {
2898 break;
2899 }
2900
2901 let this = this.clone();
2902 let open_buffers = open_buffers.clone();
2903 let buffers_tx = buffers_tx.clone();
2904 cx.spawn(|mut cx| async move {
2905 if let Some(buffer) = this
2906 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2907 .await
2908 .log_err()
2909 {
2910 if open_buffers.borrow_mut().insert(buffer.clone()) {
2911 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2912 buffers_tx.send((buffer, snapshot)).await?;
2913 }
2914 }
2915
2916 Ok::<_, anyhow::Error>(())
2917 })
2918 .detach();
2919 }
2920
2921 Ok::<_, anyhow::Error>(())
2922 })
2923 .detach_and_log_err(cx);
2924
2925 let background = cx.background().clone();
2926 cx.background().spawn(async move {
2927 let query = &query;
2928 let mut matched_buffers = Vec::new();
2929 for _ in 0..workers {
2930 matched_buffers.push(HashMap::default());
2931 }
2932 background
2933 .scoped(|scope| {
2934 for worker_matched_buffers in matched_buffers.iter_mut() {
2935 let mut buffers_rx = buffers_rx.clone();
2936 scope.spawn(async move {
2937 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2938 let buffer_matches = query
2939 .search(snapshot.as_rope())
2940 .await
2941 .iter()
2942 .map(|range| {
2943 snapshot.anchor_before(range.start)
2944 ..snapshot.anchor_after(range.end)
2945 })
2946 .collect::<Vec<_>>();
2947 if !buffer_matches.is_empty() {
2948 worker_matched_buffers
2949 .insert(buffer.clone(), buffer_matches);
2950 }
2951 }
2952 });
2953 }
2954 })
2955 .await;
2956 Ok(matched_buffers.into_iter().flatten().collect())
2957 })
2958 } else if let Some(project_id) = self.remote_id() {
2959 let request = self.client.request(query.to_proto(project_id));
2960 cx.spawn(|this, mut cx| async move {
2961 let response = request.await?;
2962 let mut result = HashMap::default();
2963 for location in response.locations {
2964 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2965 let target_buffer = this
2966 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2967 .await?;
2968 let start = location
2969 .start
2970 .and_then(deserialize_anchor)
2971 .ok_or_else(|| anyhow!("missing target start"))?;
2972 let end = location
2973 .end
2974 .and_then(deserialize_anchor)
2975 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2980 }
2981 Ok(result)
2982 })
2983 } else {
2984 Task::ready(Ok(Default::default()))
2985 }
2986 }
2987
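    /// Dispatches an `LspCommand` either to the buffer's local language
    /// server or, for remote projects, to the host over RPC. Returns a
    /// default response when neither is available or when the server lacks
    /// the required capability.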
2988 fn request_lsp<R: LspCommand>(
2989 &self,
2990 buffer_handle: ModelHandle<Buffer>,
2991 request: R,
2992 cx: &mut ModelContext<Self>,
2993 ) -> Task<Result<R::Response>>
2994 where
2995 <R::LspRequest as lsp::request::Request>::Result: Send,
2996 {
2997 let buffer = buffer_handle.read(cx);
2998 if self.is_local() {
2999 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3000 if let Some((file, language_server)) =
3001 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3002 {
3003 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3004 return cx.spawn(|this, cx| async move {
3005 if !request.check_capabilities(&language_server.capabilities()) {
3006 return Ok(Default::default());
3007 }
3008
3009 let response = language_server
3010 .request::<R::LspRequest>(lsp_params)
3011 .await
3012 .context("lsp request failed")?;
3013 request
3014 .response_from_lsp(response, this, buffer_handle, cx)
3015 .await
3016 });
3017 }
3018 } else if let Some(project_id) = self.remote_id() {
3019 let rpc = self.client.clone();
3020 let message = request.to_proto(project_id, buffer);
3021 return cx.spawn(|this, cx| async move {
3022 let response = rpc.request(message).await?;
3023 request
3024 .response_from_proto(response, this, buffer_handle, cx)
3025 .await
3026 });
3027 }
3028 Task::ready(Ok(Default::default()))
3029 }
3030
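    /// Returns the worktree containing `abs_path` along with the path
    /// relative to that worktree's root, creating a new local worktree
    /// rooted at `abs_path` if no existing worktree contains it.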
3031 pub fn find_or_create_local_worktree(
3032 &mut self,
3033 abs_path: impl AsRef<Path>,
3034 visible: bool,
3035 cx: &mut ModelContext<Self>,
3036 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3037 let abs_path = abs_path.as_ref();
3038 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3039 Task::ready(Ok((tree.clone(), relative_path.into())))
3040 } else {
3041 let worktree = self.create_local_worktree(abs_path, visible, cx);
3042 cx.foreground()
3043 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3044 }
3045 }
3046
3047 pub fn find_local_worktree(
3048 &self,
3049 abs_path: &Path,
3050 cx: &AppContext,
3051 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3052 for tree in self.worktrees(cx) {
3053 if let Some(relative_path) = tree
3054 .read(cx)
3055 .as_local()
3056 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3057 {
3058 return Some((tree.clone(), relative_path.into()));
3059 }
3060 }
3061 None
3062 }
3063
3064 pub fn is_shared(&self) -> bool {
3065 match &self.client_state {
3066 ProjectClientState::Local { is_shared, .. } => *is_shared,
3067 ProjectClientState::Remote { .. } => false,
3068 }
3069 }
3070
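    /// Starts loading a local worktree rooted at `abs_path`. Concurrent
    /// requests for the same path share a single loading task; once loaded,
    /// the worktree is registered or shared with the server if the project
    /// has a remote id.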
3071 fn create_local_worktree(
3072 &mut self,
3073 abs_path: impl AsRef<Path>,
3074 visible: bool,
3075 cx: &mut ModelContext<Self>,
3076 ) -> Task<Result<ModelHandle<Worktree>>> {
3077 let fs = self.fs.clone();
3078 let client = self.client.clone();
3079 let next_entry_id = self.next_entry_id.clone();
3080 let path: Arc<Path> = abs_path.as_ref().into();
3081 let task = self
3082 .loading_local_worktrees
3083 .entry(path.clone())
3084 .or_insert_with(|| {
3085 cx.spawn(|project, mut cx| {
3086 async move {
3087 let worktree = Worktree::local(
3088 client.clone(),
3089 path.clone(),
3090 visible,
3091 fs,
3092 next_entry_id,
3093 &mut cx,
3094 )
3095 .await;
3096 project.update(&mut cx, |project, _| {
3097 project.loading_local_worktrees.remove(&path);
3098 });
3099 let worktree = worktree?;
3100
3101 let (remote_project_id, is_shared) =
3102 project.update(&mut cx, |project, cx| {
3103 project.add_worktree(&worktree, cx);
3104 (project.remote_id(), project.is_shared())
3105 });
3106
3107 if let Some(project_id) = remote_project_id {
3108 if is_shared {
3109 worktree
3110 .update(&mut cx, |worktree, cx| {
3111 worktree.as_local_mut().unwrap().share(project_id, cx)
3112 })
3113 .await?;
3114 } else {
3115 worktree
3116 .update(&mut cx, |worktree, cx| {
3117 worktree.as_local_mut().unwrap().register(project_id, cx)
3118 })
3119 .await?;
3120 }
3121 }
3122
3123 Ok(worktree)
3124 }
                    .map_err(Arc::new)
3126 })
3127 .shared()
3128 })
3129 .clone();
3130 cx.foreground().spawn(async move {
3131 match task.await {
3132 Ok(worktree) => Ok(worktree),
3133 Err(err) => Err(anyhow!("{}", err)),
3134 }
3135 })
3136 }
3137
3138 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3139 self.worktrees.retain(|worktree| {
3140 worktree
3141 .upgrade(cx)
3142 .map_or(false, |w| w.read(cx).id() != id)
3143 });
3144 cx.notify();
3145 }
3146
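    /// Adds a worktree to the project. Shared, visible, or remote worktrees
    /// are held with strong handles; invisible local worktrees are held
    /// weakly and pruned once they are released.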
3147 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3148 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3149 if worktree.read(cx).is_local() {
3150 cx.subscribe(&worktree, |this, worktree, _, cx| {
3151 this.update_local_worktree_buffers(worktree, cx);
3152 })
3153 .detach();
3154 }
3155
3156 let push_strong_handle = {
3157 let worktree = worktree.read(cx);
3158 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3159 };
3160 if push_strong_handle {
3161 self.worktrees
3162 .push(WorktreeHandle::Strong(worktree.clone()));
3163 } else {
3164 cx.observe_release(&worktree, |this, _, cx| {
3165 this.worktrees
3166 .retain(|worktree| worktree.upgrade(cx).is_some());
3167 cx.notify();
3168 })
3169 .detach();
3170 self.worktrees
3171 .push(WorktreeHandle::Weak(worktree.downgrade()));
3172 }
3173 cx.notify();
3174 }
3175
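    /// Reconciles open buffers with a local worktree after its snapshot
    /// changes: each buffer's `File` is re-resolved (by entry id, then by
    /// path), the updated file is broadcast via `UpdateBufferFile` when the
    /// project has a remote id, and buffers that have been dropped are
    /// removed from `opened_buffers`.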
3176 fn update_local_worktree_buffers(
3177 &mut self,
3178 worktree_handle: ModelHandle<Worktree>,
3179 cx: &mut ModelContext<Self>,
3180 ) {
3181 let snapshot = worktree_handle.read(cx).snapshot();
3182 let mut buffers_to_delete = Vec::new();
3183 for (buffer_id, buffer) in &self.opened_buffers {
3184 if let Some(buffer) = buffer.upgrade(cx) {
3185 buffer.update(cx, |buffer, cx| {
3186 if let Some(old_file) = File::from_dyn(buffer.file()) {
3187 if old_file.worktree != worktree_handle {
3188 return;
3189 }
3190
3191 let new_file = if let Some(entry) = old_file
3192 .entry_id
3193 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3194 {
3195 File {
3196 is_local: true,
3197 entry_id: Some(entry.id),
3198 mtime: entry.mtime,
3199 path: entry.path.clone(),
3200 worktree: worktree_handle.clone(),
3201 }
3202 } else if let Some(entry) =
3203 snapshot.entry_for_path(old_file.path().as_ref())
3204 {
3205 File {
3206 is_local: true,
3207 entry_id: Some(entry.id),
3208 mtime: entry.mtime,
3209 path: entry.path.clone(),
3210 worktree: worktree_handle.clone(),
3211 }
3212 } else {
3213 File {
3214 is_local: true,
3215 entry_id: None,
3216 path: old_file.path().clone(),
3217 mtime: old_file.mtime(),
3218 worktree: worktree_handle.clone(),
3219 }
3220 };
3221
3222 if let Some(project_id) = self.remote_id() {
3223 self.client
3224 .send(proto::UpdateBufferFile {
3225 project_id,
3226 buffer_id: *buffer_id as u64,
3227 file: Some(new_file.to_proto()),
3228 })
3229 .log_err();
3230 }
3231 buffer.file_updated(Box::new(new_file), cx).detach();
3232 }
3233 });
3234 } else {
3235 buffers_to_delete.push(*buffer_id);
3236 }
3237 }
3238
3239 for buffer_id in buffers_to_delete {
3240 self.opened_buffers.remove(&buffer_id);
3241 }
3242 }
3243
3244 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3245 let new_active_entry = entry.and_then(|project_path| {
3246 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3247 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3248 Some(entry.id)
3249 });
3250 if new_active_entry != self.active_entry {
3251 self.active_entry = new_active_entry;
3252 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3253 }
3254 }
3255
3256 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3257 self.language_servers_with_diagnostics_running > 0
3258 }
3259
3260 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3261 let mut summary = DiagnosticSummary::default();
3262 for (_, path_summary) in self.diagnostic_summaries(cx) {
3263 summary.error_count += path_summary.error_count;
3264 summary.warning_count += path_summary.warning_count;
3265 summary.info_count += path_summary.info_count;
3266 summary.hint_count += path_summary.hint_count;
3267 }
3268 summary
3269 }
3270
3271 pub fn diagnostic_summaries<'a>(
3272 &'a self,
3273 cx: &'a AppContext,
3274 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3275 self.worktrees(cx).flat_map(move |worktree| {
3276 let worktree = worktree.read(cx);
3277 let worktree_id = worktree.id();
3278 worktree
3279 .diagnostic_summaries()
3280 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3281 })
3282 }
3283
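    /// Increments the count of language servers currently running
    /// disk-based diagnostics, emitting `DiskBasedDiagnosticsStarted` when
    /// the first one begins.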
3284 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3285 self.language_servers_with_diagnostics_running += 1;
3286 if self.language_servers_with_diagnostics_running == 1 {
3287 cx.emit(Event::DiskBasedDiagnosticsStarted);
3288 }
3289 }
3290
3291 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3292 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3293 self.language_servers_with_diagnostics_running -= 1;
3294 if self.language_servers_with_diagnostics_running == 0 {
3295 cx.emit(Event::DiskBasedDiagnosticsFinished);
3296 }
3297 }
3298
3299 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3300 self.active_entry
3301 }
3302
3303 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3304 self.worktree_for_id(path.worktree_id, cx)?
3305 .read(cx)
3306 .entry_for_path(&path.path)
3307 .map(|entry| entry.id)
3308 }
3309
3310 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3311 let worktree = self.worktree_for_entry(entry_id, cx)?;
3312 let worktree = worktree.read(cx);
3313 let worktree_id = worktree.id();
3314 let path = worktree.entry_for_id(entry_id)?.path.clone();
3315 Some(ProjectPath { worktree_id, path })
3316 }
3317
3318 // RPC message handlers
3319
3320 async fn handle_unshare_project(
3321 this: ModelHandle<Self>,
3322 _: TypedEnvelope<proto::UnshareProject>,
3323 _: Arc<Client>,
3324 mut cx: AsyncAppContext,
3325 ) -> Result<()> {
3326 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3327 Ok(())
3328 }
3329
3330 async fn handle_add_collaborator(
3331 this: ModelHandle<Self>,
3332 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3333 _: Arc<Client>,
3334 mut cx: AsyncAppContext,
3335 ) -> Result<()> {
3336 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3337 let collaborator = envelope
3338 .payload
3339 .collaborator
3340 .take()
3341 .ok_or_else(|| anyhow!("empty collaborator"))?;
3342
3343 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3344 this.update(&mut cx, |this, cx| {
3345 this.collaborators
3346 .insert(collaborator.peer_id, collaborator);
3347 cx.notify();
3348 });
3349
3350 Ok(())
3351 }
3352
3353 async fn handle_remove_collaborator(
3354 this: ModelHandle<Self>,
3355 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3356 _: Arc<Client>,
3357 mut cx: AsyncAppContext,
3358 ) -> Result<()> {
3359 this.update(&mut cx, |this, cx| {
3360 let peer_id = PeerId(envelope.payload.peer_id);
3361 let replica_id = this
3362 .collaborators
3363 .remove(&peer_id)
3364 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3365 .replica_id;
3366 for (_, buffer) in &this.opened_buffers {
3367 if let Some(buffer) = buffer.upgrade(cx) {
3368 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3369 }
3370 }
3371 cx.notify();
3372 Ok(())
3373 })
3374 }
3375
3376 async fn handle_register_worktree(
3377 this: ModelHandle<Self>,
3378 envelope: TypedEnvelope<proto::RegisterWorktree>,
3379 client: Arc<Client>,
3380 mut cx: AsyncAppContext,
3381 ) -> Result<()> {
3382 this.update(&mut cx, |this, cx| {
3383 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3384 let replica_id = this.replica_id();
3385 let worktree = proto::Worktree {
3386 id: envelope.payload.worktree_id,
3387 root_name: envelope.payload.root_name,
3388 entries: Default::default(),
3389 diagnostic_summaries: Default::default(),
3390 visible: envelope.payload.visible,
3391 };
3392 let (worktree, load_task) =
3393 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3394 this.add_worktree(&worktree, cx);
3395 load_task.detach();
3396 Ok(())
3397 })
3398 }
3399
3400 async fn handle_unregister_worktree(
3401 this: ModelHandle<Self>,
3402 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3403 _: Arc<Client>,
3404 mut cx: AsyncAppContext,
3405 ) -> Result<()> {
3406 this.update(&mut cx, |this, cx| {
3407 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3408 this.remove_worktree(worktree_id, cx);
3409 Ok(())
3410 })
3411 }
3412
3413 async fn handle_update_worktree(
3414 this: ModelHandle<Self>,
3415 envelope: TypedEnvelope<proto::UpdateWorktree>,
3416 _: Arc<Client>,
3417 mut cx: AsyncAppContext,
3418 ) -> Result<()> {
3419 this.update(&mut cx, |this, cx| {
3420 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3421 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3422 worktree.update(cx, |worktree, _| {
3423 let worktree = worktree.as_remote_mut().unwrap();
3424 worktree.update_from_remote(envelope)
3425 })?;
3426 }
3427 Ok(())
3428 })
3429 }
3430
3431 async fn handle_update_diagnostic_summary(
3432 this: ModelHandle<Self>,
3433 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3434 _: Arc<Client>,
3435 mut cx: AsyncAppContext,
3436 ) -> Result<()> {
3437 this.update(&mut cx, |this, cx| {
3438 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3439 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3440 if let Some(summary) = envelope.payload.summary {
3441 let project_path = ProjectPath {
3442 worktree_id,
3443 path: Path::new(&summary.path).into(),
3444 };
3445 worktree.update(cx, |worktree, _| {
3446 worktree
3447 .as_remote_mut()
3448 .unwrap()
3449 .update_diagnostic_summary(project_path.path.clone(), &summary);
3450 });
3451 cx.emit(Event::DiagnosticsUpdated(project_path));
3452 }
3453 }
3454 Ok(())
3455 })
3456 }
3457
3458 async fn handle_start_language_server(
3459 this: ModelHandle<Self>,
3460 envelope: TypedEnvelope<proto::StartLanguageServer>,
3461 _: Arc<Client>,
3462 mut cx: AsyncAppContext,
3463 ) -> Result<()> {
3464 let server = envelope
3465 .payload
3466 .server
3467 .ok_or_else(|| anyhow!("invalid server"))?;
3468 this.update(&mut cx, |this, cx| {
3469 this.language_server_statuses.insert(
3470 server.id as usize,
3471 LanguageServerStatus {
3472 name: server.name,
3473 pending_work: Default::default(),
3474 pending_diagnostic_updates: 0,
3475 },
3476 );
3477 cx.notify();
3478 });
3479 Ok(())
3480 }
3481
3482 async fn handle_update_language_server(
3483 this: ModelHandle<Self>,
3484 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3485 _: Arc<Client>,
3486 mut cx: AsyncAppContext,
3487 ) -> Result<()> {
3488 let language_server_id = envelope.payload.language_server_id as usize;
3489 match envelope
3490 .payload
3491 .variant
3492 .ok_or_else(|| anyhow!("invalid variant"))?
3493 {
3494 proto::update_language_server::Variant::WorkStart(payload) => {
3495 this.update(&mut cx, |this, cx| {
3496 this.on_lsp_work_start(language_server_id, payload.token, cx);
3497 })
3498 }
3499 proto::update_language_server::Variant::WorkProgress(payload) => {
3500 this.update(&mut cx, |this, cx| {
3501 this.on_lsp_work_progress(
3502 language_server_id,
3503 payload.token,
3504 LanguageServerProgress {
3505 message: payload.message,
3506 percentage: payload.percentage.map(|p| p as usize),
3507 last_update_at: Instant::now(),
3508 },
3509 cx,
3510 );
3511 })
3512 }
3513 proto::update_language_server::Variant::WorkEnd(payload) => {
3514 this.update(&mut cx, |this, cx| {
3515 this.on_lsp_work_end(language_server_id, payload.token, cx);
3516 })
3517 }
3518 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3519 this.update(&mut cx, |this, cx| {
3520 this.disk_based_diagnostics_started(cx);
3521 })
3522 }
3523 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3524 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3525 }
3526 }
3527
3528 Ok(())
3529 }
3530
3531 async fn handle_update_buffer(
3532 this: ModelHandle<Self>,
3533 envelope: TypedEnvelope<proto::UpdateBuffer>,
3534 _: Arc<Client>,
3535 mut cx: AsyncAppContext,
3536 ) -> Result<()> {
3537 this.update(&mut cx, |this, cx| {
            let payload = envelope.payload;
3539 let buffer_id = payload.buffer_id;
3540 let ops = payload
3541 .operations
3542 .into_iter()
3543 .map(|op| language::proto::deserialize_operation(op))
3544 .collect::<Result<Vec<_>, _>>()?;
3545 match this.opened_buffers.entry(buffer_id) {
3546 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3547 OpenBuffer::Strong(buffer) => {
3548 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3549 }
3550 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3551 OpenBuffer::Weak(_) => {}
3552 },
3553 hash_map::Entry::Vacant(e) => {
3554 e.insert(OpenBuffer::Loading(ops));
3555 }
3556 }
3557 Ok(())
3558 })
3559 }
3560
3561 async fn handle_update_buffer_file(
3562 this: ModelHandle<Self>,
3563 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3564 _: Arc<Client>,
3565 mut cx: AsyncAppContext,
3566 ) -> Result<()> {
3567 this.update(&mut cx, |this, cx| {
3568 let payload = envelope.payload.clone();
3569 let buffer_id = payload.buffer_id;
3570 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3571 let worktree = this
3572 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3573 .ok_or_else(|| anyhow!("no such worktree"))?;
3574 let file = File::from_proto(file, worktree.clone(), cx)?;
3575 let buffer = this
3576 .opened_buffers
3577 .get_mut(&buffer_id)
3578 .and_then(|b| b.upgrade(cx))
3579 .ok_or_else(|| anyhow!("no such buffer"))?;
3580 buffer.update(cx, |buffer, cx| {
3581 buffer.file_updated(Box::new(file), cx).detach();
3582 });
3583 Ok(())
3584 })
3585 }
3586
3587 async fn handle_save_buffer(
3588 this: ModelHandle<Self>,
3589 envelope: TypedEnvelope<proto::SaveBuffer>,
3590 _: Arc<Client>,
3591 mut cx: AsyncAppContext,
3592 ) -> Result<proto::BufferSaved> {
3593 let buffer_id = envelope.payload.buffer_id;
3594 let requested_version = deserialize_version(envelope.payload.version);
3595
3596 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3597 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3598 let buffer = this
3599 .opened_buffers
3600 .get(&buffer_id)
3601 .map(|buffer| buffer.upgrade(cx).unwrap())
3602 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3603 Ok::<_, anyhow::Error>((project_id, buffer))
3604 })?;
3605 buffer
3606 .update(&mut cx, |buffer, _| {
3607 buffer.wait_for_version(requested_version)
3608 })
3609 .await;
3610
3611 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3612 Ok(proto::BufferSaved {
3613 project_id,
3614 buffer_id,
3615 version: serialize_version(&saved_version),
3616 mtime: Some(mtime.into()),
3617 })
3618 }
3619
3620 async fn handle_format_buffers(
3621 this: ModelHandle<Self>,
3622 envelope: TypedEnvelope<proto::FormatBuffers>,
3623 _: Arc<Client>,
3624 mut cx: AsyncAppContext,
3625 ) -> Result<proto::FormatBuffersResponse> {
3626 let sender_id = envelope.original_sender_id()?;
3627 let format = this.update(&mut cx, |this, cx| {
3628 let mut buffers = HashSet::default();
3629 for buffer_id in &envelope.payload.buffer_ids {
3630 buffers.insert(
3631 this.opened_buffers
3632 .get(buffer_id)
3633 .map(|buffer| buffer.upgrade(cx).unwrap())
3634 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3635 );
3636 }
3637 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3638 })?;
3639
3640 let project_transaction = format.await?;
3641 let project_transaction = this.update(&mut cx, |this, cx| {
3642 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3643 });
3644 Ok(proto::FormatBuffersResponse {
3645 transaction: Some(project_transaction),
3646 })
3647 }
3648
3649 async fn handle_get_completions(
3650 this: ModelHandle<Self>,
3651 envelope: TypedEnvelope<proto::GetCompletions>,
3652 _: Arc<Client>,
3653 mut cx: AsyncAppContext,
3654 ) -> Result<proto::GetCompletionsResponse> {
3655 let position = envelope
3656 .payload
3657 .position
3658 .and_then(language::proto::deserialize_anchor)
3659 .ok_or_else(|| anyhow!("invalid position"))?;
3660 let version = deserialize_version(envelope.payload.version);
3661 let buffer = this.read_with(&cx, |this, cx| {
3662 this.opened_buffers
3663 .get(&envelope.payload.buffer_id)
3664 .map(|buffer| buffer.upgrade(cx).unwrap())
3665 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3666 })?;
3667 buffer
3668 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3669 .await;
3670 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3671 let completions = this
3672 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3673 .await?;
3674
3675 Ok(proto::GetCompletionsResponse {
3676 completions: completions
3677 .iter()
3678 .map(language::proto::serialize_completion)
3679 .collect(),
3680 version: serialize_version(&version),
3681 })
3682 }
3683
3684 async fn handle_apply_additional_edits_for_completion(
3685 this: ModelHandle<Self>,
3686 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3687 _: Arc<Client>,
3688 mut cx: AsyncAppContext,
3689 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3690 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3691 let buffer = this
3692 .opened_buffers
3693 .get(&envelope.payload.buffer_id)
3694 .map(|buffer| buffer.upgrade(cx).unwrap())
3695 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3696 let language = buffer.read(cx).language();
3697 let completion = language::proto::deserialize_completion(
3698 envelope
3699 .payload
3700 .completion
3701 .ok_or_else(|| anyhow!("invalid completion"))?,
3702 language,
3703 )?;
3704 Ok::<_, anyhow::Error>(
3705 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3706 )
3707 })?;
3708
3709 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3710 transaction: apply_additional_edits
3711 .await?
3712 .as_ref()
3713 .map(language::proto::serialize_transaction),
3714 })
3715 }
3716
3717 async fn handle_get_code_actions(
3718 this: ModelHandle<Self>,
3719 envelope: TypedEnvelope<proto::GetCodeActions>,
3720 _: Arc<Client>,
3721 mut cx: AsyncAppContext,
3722 ) -> Result<proto::GetCodeActionsResponse> {
3723 let start = envelope
3724 .payload
3725 .start
3726 .and_then(language::proto::deserialize_anchor)
3727 .ok_or_else(|| anyhow!("invalid start"))?;
3728 let end = envelope
3729 .payload
3730 .end
3731 .and_then(language::proto::deserialize_anchor)
3732 .ok_or_else(|| anyhow!("invalid end"))?;
3733 let buffer = this.update(&mut cx, |this, cx| {
3734 this.opened_buffers
3735 .get(&envelope.payload.buffer_id)
3736 .map(|buffer| buffer.upgrade(cx).unwrap())
3737 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3738 })?;
3739 buffer
3740 .update(&mut cx, |buffer, _| {
3741 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3742 })
3743 .await;
3744
3745 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3746 let code_actions = this.update(&mut cx, |this, cx| {
3747 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3748 })?;
3749
3750 Ok(proto::GetCodeActionsResponse {
3751 actions: code_actions
3752 .await?
3753 .iter()
3754 .map(language::proto::serialize_code_action)
3755 .collect(),
3756 version: serialize_version(&version),
3757 })
3758 }
3759
3760 async fn handle_apply_code_action(
3761 this: ModelHandle<Self>,
3762 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3763 _: Arc<Client>,
3764 mut cx: AsyncAppContext,
3765 ) -> Result<proto::ApplyCodeActionResponse> {
3766 let sender_id = envelope.original_sender_id()?;
3767 let action = language::proto::deserialize_code_action(
3768 envelope
3769 .payload
3770 .action
3771 .ok_or_else(|| anyhow!("invalid action"))?,
3772 )?;
3773 let apply_code_action = this.update(&mut cx, |this, cx| {
3774 let buffer = this
3775 .opened_buffers
3776 .get(&envelope.payload.buffer_id)
3777 .map(|buffer| buffer.upgrade(cx).unwrap())
3778 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3779 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3780 })?;
3781
3782 let project_transaction = apply_code_action.await?;
3783 let project_transaction = this.update(&mut cx, |this, cx| {
3784 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3785 });
3786 Ok(proto::ApplyCodeActionResponse {
3787 transaction: Some(project_transaction),
3788 })
3789 }
3790
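    /// Generic handler for proto requests that wrap an `LspCommand`: the
    /// request is deserialized, dispatched through `request_lsp`, and the
    /// response is serialized back for the requesting peer along with the
    /// buffer version it was computed against.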
3791 async fn handle_lsp_command<T: LspCommand>(
3792 this: ModelHandle<Self>,
3793 envelope: TypedEnvelope<T::ProtoRequest>,
3794 _: Arc<Client>,
3795 mut cx: AsyncAppContext,
3796 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3797 where
3798 <T::LspRequest as lsp::request::Request>::Result: Send,
3799 {
3800 let sender_id = envelope.original_sender_id()?;
3801 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3802 let buffer_handle = this.read_with(&cx, |this, _| {
3803 this.opened_buffers
3804 .get(&buffer_id)
3805 .and_then(|buffer| buffer.upgrade(&cx))
3806 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3807 })?;
3808 let request = T::from_proto(
3809 envelope.payload,
3810 this.clone(),
3811 buffer_handle.clone(),
3812 cx.clone(),
3813 )
3814 .await?;
3815 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3816 let response = this
3817 .update(&mut cx, |this, cx| {
3818 this.request_lsp(buffer_handle, request, cx)
3819 })
3820 .await?;
3821 this.update(&mut cx, |this, cx| {
3822 Ok(T::response_to_proto(
3823 response,
3824 this,
3825 sender_id,
3826 &buffer_version,
3827 cx,
3828 ))
3829 })
3830 }
3831
3832 async fn handle_get_project_symbols(
3833 this: ModelHandle<Self>,
3834 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3835 _: Arc<Client>,
3836 mut cx: AsyncAppContext,
3837 ) -> Result<proto::GetProjectSymbolsResponse> {
3838 let symbols = this
3839 .update(&mut cx, |this, cx| {
3840 this.symbols(&envelope.payload.query, cx)
3841 })
3842 .await?;
3843
3844 Ok(proto::GetProjectSymbolsResponse {
3845 symbols: symbols.iter().map(serialize_symbol).collect(),
3846 })
3847 }
3848
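/// Runs a project-wide search on behalf of a peer and returns each match as a
/// serialized buffer plus a pair of anchors.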
3849 async fn handle_search_project(
3850 this: ModelHandle<Self>,
3851 envelope: TypedEnvelope<proto::SearchProject>,
3852 _: Arc<Client>,
3853 mut cx: AsyncAppContext,
3854 ) -> Result<proto::SearchProjectResponse> {
3855 let peer_id = envelope.original_sender_id()?;
3856 let query = SearchQuery::from_proto(envelope.payload)?;
3857 let result = this
3858 .update(&mut cx, |this, cx| this.search(query, cx))
3859 .await?;
3860
3861 this.update(&mut cx, |this, cx| {
3862 let mut locations = Vec::new();
3863 for (buffer, ranges) in result {
3864 for range in ranges {
3865 let start = serialize_anchor(&range.start);
3866 let end = serialize_anchor(&range.end);
3867 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3868 locations.push(proto::Location {
3869 buffer: Some(buffer),
3870 start: Some(start),
3871 end: Some(end),
3872 });
3873 }
3874 }
3875 Ok(proto::SearchProjectResponse { locations })
3876 })
3877 }
3878
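/// Handles a peer's request to open the buffer backing a previously returned
/// project symbol. The symbol's signature is recomputed and compared against
/// the one in the request, so only symbols this project actually handed out
/// are accepted.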
3879 async fn handle_open_buffer_for_symbol(
3880 this: ModelHandle<Self>,
3881 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3882 _: Arc<Client>,
3883 mut cx: AsyncAppContext,
3884 ) -> Result<proto::OpenBufferForSymbolResponse> {
3885 let peer_id = envelope.original_sender_id()?;
3886 let symbol = envelope
3887 .payload
3888 .symbol
3889 .ok_or_else(|| anyhow!("invalid symbol"))?;
3890 let symbol = this.read_with(&cx, |this, _| {
3891 let symbol = this.deserialize_symbol(symbol)?;
3892 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3893 if signature == symbol.signature {
3894 Ok(symbol)
3895 } else {
3896 Err(anyhow!("invalid symbol signature"))
3897 }
3898 })?;
3899 let buffer = this
3900 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3901 .await?;
3902
3903 Ok(proto::OpenBufferForSymbolResponse {
3904 buffer: Some(this.update(&mut cx, |this, cx| {
3905 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3906 })),
3907 })
3908 }
3909
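/// Produces a signature for a (worktree, path) pair by hashing it together
/// with this project's private nonce. Symbols sent to peers carry this
/// signature, and `handle_open_buffer_for_symbol` rejects requests whose
/// signature doesn't match.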
3910 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3911 let mut hasher = Sha256::new();
3912 hasher.update(worktree_id.to_proto().to_be_bytes());
3913 hasher.update(path.to_string_lossy().as_bytes());
3914 hasher.update(self.nonce.to_be_bytes());
3915 hasher.finalize().as_slice().try_into().unwrap()
3916 }
3917
3918 async fn handle_open_buffer_by_id(
3919 this: ModelHandle<Self>,
3920 envelope: TypedEnvelope<proto::OpenBufferById>,
3921 _: Arc<Client>,
3922 mut cx: AsyncAppContext,
3923 ) -> Result<proto::OpenBufferResponse> {
3924 let peer_id = envelope.original_sender_id()?;
3925 let buffer = this
3926 .update(&mut cx, |this, cx| {
3927 this.open_buffer_by_id(envelope.payload.id, cx)
3928 })
3929 .await?;
3930 this.update(&mut cx, |this, cx| {
3931 Ok(proto::OpenBufferResponse {
3932 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3933 })
3934 })
3935 }
3936
3937 async fn handle_open_buffer_by_path(
3938 this: ModelHandle<Self>,
3939 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3940 _: Arc<Client>,
3941 mut cx: AsyncAppContext,
3942 ) -> Result<proto::OpenBufferResponse> {
3943 let peer_id = envelope.original_sender_id()?;
3944 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3945 let open_buffer = this.update(&mut cx, |this, cx| {
3946 this.open_buffer(
3947 ProjectPath {
3948 worktree_id,
3949 path: PathBuf::from(envelope.payload.path).into(),
3950 },
3951 cx,
3952 )
3953 });
3954
3955 let buffer = open_buffer.await?;
3956 this.update(&mut cx, |this, cx| {
3957 Ok(proto::OpenBufferResponse {
3958 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3959 })
3960 })
3961 }
3962
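/// Converts a `ProjectTransaction` into its proto form for a given peer,
/// serializing each affected buffer for that peer along the way.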
3963 fn serialize_project_transaction_for_peer(
3964 &mut self,
3965 project_transaction: ProjectTransaction,
3966 peer_id: PeerId,
3967 cx: &AppContext,
3968 ) -> proto::ProjectTransaction {
3969 let mut serialized_transaction = proto::ProjectTransaction {
3970 buffers: Default::default(),
3971 transactions: Default::default(),
3972 };
3973 for (buffer, transaction) in project_transaction.0 {
3974 serialized_transaction
3975 .buffers
3976 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3977 serialized_transaction
3978 .transactions
3979 .push(language::proto::serialize_transaction(&transaction));
3980 }
3981 serialized_transaction
3982 }
3983
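/// Reconstructs a `ProjectTransaction` received over the wire: each buffer is
/// deserialized (or awaited if it hasn't arrived yet), the transaction's edits
/// are waited for, and the transactions are optionally pushed onto undo
/// history.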
3984 fn deserialize_project_transaction(
3985 &mut self,
3986 message: proto::ProjectTransaction,
3987 push_to_history: bool,
3988 cx: &mut ModelContext<Self>,
3989 ) -> Task<Result<ProjectTransaction>> {
3990 cx.spawn(|this, mut cx| async move {
3991 let mut project_transaction = ProjectTransaction::default();
3992 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3993 let buffer = this
3994 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3995 .await?;
3996 let transaction = language::proto::deserialize_transaction(transaction)?;
3997 project_transaction.0.insert(buffer, transaction);
3998 }
3999
4000 for (buffer, transaction) in &project_transaction.0 {
4001 buffer
4002 .update(&mut cx, |buffer, _| {
4003 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4004 })
4005 .await;
4006
4007 if push_to_history {
4008 buffer.update(&mut cx, |buffer, _| {
4009 buffer.push_transaction(transaction.clone(), Instant::now());
4010 });
4011 }
4012 }
4013
4014 Ok(project_transaction)
4015 })
4016 }
4017
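/// Serializes a buffer for transmission to a peer. The first time a given
/// buffer is shared with a peer its full state is sent; afterwards only its
/// remote id is sent, since the peer already holds a replica.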
4018 fn serialize_buffer_for_peer(
4019 &mut self,
4020 buffer: &ModelHandle<Buffer>,
4021 peer_id: PeerId,
4022 cx: &AppContext,
4023 ) -> proto::Buffer {
4024 let buffer_id = buffer.read(cx).remote_id();
4025 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4026 if shared_buffers.insert(buffer_id) {
4027 proto::Buffer {
4028 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4029 }
4030 } else {
4031 proto::Buffer {
4032 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4033 }
4034 }
4035 }
4036
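/// Materializes a buffer received over the wire, either by looking up an
/// already-open buffer by id or by constructing a new replica from the
/// serialized state.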
4037 fn deserialize_buffer(
4038 &mut self,
4039 buffer: proto::Buffer,
4040 cx: &mut ModelContext<Self>,
4041 ) -> Task<Result<ModelHandle<Buffer>>> {
4042 let replica_id = self.replica_id();
4043
4044 let opened_buffer_tx = self.opened_buffer.0.clone();
4045 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4046 cx.spawn(|this, mut cx| async move {
4047 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4048 proto::buffer::Variant::Id(id) => {
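// The buffer may not have been registered locally yet, since messages can
// arrive out of order. Wait for new buffers to be opened and retry.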
4049 let buffer = loop {
4050 let buffer = this.read_with(&cx, |this, cx| {
4051 this.opened_buffers
4052 .get(&id)
4053 .and_then(|buffer| buffer.upgrade(cx))
4054 });
4055 if let Some(buffer) = buffer {
4056 break buffer;
4057 }
4058 opened_buffer_rx
4059 .next()
4060 .await
4061 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4062 };
4063 Ok(buffer)
4064 }
4065 proto::buffer::Variant::State(mut buffer) => {
4066 let mut buffer_worktree = None;
4067 let mut buffer_file = None;
4068 if let Some(file) = buffer.file.take() {
4069 this.read_with(&cx, |this, cx| {
4070 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4071 let worktree =
4072 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4073 anyhow!("no worktree found for id {}", file.worktree_id)
4074 })?;
4075 buffer_file =
4076 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4077 as Box<dyn language::File>);
4078 buffer_worktree = Some(worktree);
4079 Ok::<_, anyhow::Error>(())
4080 })?;
4081 }
4082
4083 let buffer = cx.add_model(|cx| {
4084 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4085 });
4086
4087 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4088
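// Wake any tasks in the `Id` branch above that are waiting for newly
// registered buffers.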
4089 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4090 Ok(buffer)
4091 }
4092 }
4093 })
4094 }
4095
4096 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4097 let language = self
4098 .languages
4099 .get_language(&serialized_symbol.language_name);
4100 let start = serialized_symbol
4101 .start
4102 .ok_or_else(|| anyhow!("invalid start"))?;
4103 let end = serialized_symbol
4104 .end
4105 .ok_or_else(|| anyhow!("invalid end"))?;
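// The symbol kind travels over the wire as a raw integer; reinterpreting it
// directly assumes both peers agree on its numeric encoding.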
4106 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4107 Ok(Symbol {
4108 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4109 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4110 language_name: serialized_symbol.language_name.clone(),
4111 label: language
4112 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4113 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4114 name: serialized_symbol.name,
4115 path: PathBuf::from(serialized_symbol.path),
4116 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4117 kind,
4118 signature: serialized_symbol
4119 .signature
4120 .try_into()
4121 .map_err(|_| anyhow!("invalid signature"))?,
4122 })
4123 }
4124
4125 async fn handle_buffer_saved(
4126 this: ModelHandle<Self>,
4127 envelope: TypedEnvelope<proto::BufferSaved>,
4128 _: Arc<Client>,
4129 mut cx: AsyncAppContext,
4130 ) -> Result<()> {
4131 let version = deserialize_version(envelope.payload.version);
4132 let mtime = envelope
4133 .payload
4134 .mtime
4135 .ok_or_else(|| anyhow!("missing mtime"))?
4136 .into();
4137
4138 this.update(&mut cx, |this, cx| {
4139 let buffer = this
4140 .opened_buffers
4141 .get(&envelope.payload.buffer_id)
4142 .and_then(|buffer| buffer.upgrade(cx));
4143 if let Some(buffer) = buffer {
4144 buffer.update(cx, |buffer, cx| {
4145 buffer.did_save(version, mtime, None, cx);
4146 });
4147 }
4148 Ok(())
4149 })
4150 }
4151
4152 async fn handle_buffer_reloaded(
4153 this: ModelHandle<Self>,
4154 envelope: TypedEnvelope<proto::BufferReloaded>,
4155 _: Arc<Client>,
4156 mut cx: AsyncAppContext,
4157 ) -> Result<()> {
4158 let payload = envelope.payload.clone();
4159 let version = deserialize_version(payload.version);
4160 let mtime = payload
4161 .mtime
4162 .ok_or_else(|| anyhow!("missing mtime"))?
4163 .into();
4164 this.update(&mut cx, |this, cx| {
4165 let buffer = this
4166 .opened_buffers
4167 .get(&payload.buffer_id)
4168 .and_then(|buffer| buffer.upgrade(cx));
4169 if let Some(buffer) = buffer {
4170 buffer.update(cx, |buffer, cx| {
4171 buffer.did_reload(version, mtime, cx);
4172 });
4173 }
4174 Ok(())
4175 })
4176 }
4177
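/// Fuzzy-matches `query` against the paths of all visible worktrees on the
/// background executor. Worktree root names are included in the candidates
/// only when more than one worktree is visible.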
4178 pub fn match_paths<'a>(
4179 &self,
4180 query: &'a str,
4181 include_ignored: bool,
4182 smart_case: bool,
4183 max_results: usize,
4184 cancel_flag: &'a AtomicBool,
4185 cx: &AppContext,
4186 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4187 let worktrees = self
4188 .worktrees(cx)
4189 .filter(|worktree| worktree.read(cx).is_visible())
4190 .collect::<Vec<_>>();
4191 let include_root_name = worktrees.len() > 1;
4192 let candidate_sets = worktrees
4193 .into_iter()
4194 .map(|worktree| CandidateSet {
4195 snapshot: worktree.read(cx).snapshot(),
4196 include_ignored,
4197 include_root_name,
4198 })
4199 .collect::<Vec<_>>();
4200
4201 let background = cx.background().clone();
4202 async move {
4203 fuzzy::match_paths(
4204 candidate_sets.as_slice(),
4205 query,
4206 smart_case,
4207 max_results,
4208 cancel_flag,
4209 background,
4210 )
4211 .await
4212 }
4213 }
4214
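/// Converts a batch of LSP text edits, expressed against `version` of the
/// buffer, into anchor ranges in the current buffer. Adjacent or
/// newline-separated edits are coalesced, and multi-line replacements are
/// diffed against the old text so that anchors in unchanged regions keep
/// their positions.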
4215 fn edits_from_lsp(
4216 &mut self,
4217 buffer: &ModelHandle<Buffer>,
4218 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4219 version: Option<i32>,
4220 cx: &mut ModelContext<Self>,
4221 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4222 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4223 cx.background().spawn(async move {
4224 let snapshot = snapshot?;
4225 let mut lsp_edits = lsp_edits
4226 .into_iter()
4227 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4228 .peekable();
4229
4230 let mut edits = Vec::new();
4231 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4232 // Combine any LSP edits that are adjacent.
4233 //
4234 // Also, combine LSP edits that are separated from each other by only
4235 // a newline. This is important because for some code actions,
4236 // Rust-analyzer rewrites the entire buffer via a series of edits that
4237 // are separated by unchanged newline characters.
4238 //
4239 // In order for the diffing logic below to work properly, any edits that
4240 // cancel each other out must be combined into one.
4241 while let Some((next_range, next_text)) = lsp_edits.peek() {
4242 if next_range.start > range.end {
4243 if next_range.start.row > range.end.row + 1
4244 || next_range.start.column > 0
4245 || snapshot.clip_point_utf16(
4246 PointUtf16::new(range.end.row, u32::MAX),
4247 Bias::Left,
4248 ) > range.end
4249 {
4250 break;
4251 }
4252 new_text.push('\n');
4253 }
4254 range.end = next_range.end;
4255 new_text.push_str(&next_text);
4256 lsp_edits.next();
4257 }
4258
4259 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4260 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4261 {
4262 return Err(anyhow!("invalid edits received from language server"));
4263 }
4264
4265 // For multiline edits, perform a diff of the old and new text so that
4266 // we can identify the changes more precisely, preserving the locations
4267 // of any anchors positioned in the unchanged regions.
4268 if range.end.row > range.start.row {
4269 let mut offset = range.start.to_offset(&snapshot);
4270 let old_text = snapshot.text_for_range(range).collect::<String>();
4271
4272 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4273 let mut moved_since_edit = true;
4274 for change in diff.iter_all_changes() {
4275 let tag = change.tag();
4276 let value = change.value();
4277 match tag {
4278 ChangeTag::Equal => {
4279 offset += value.len();
4280 moved_since_edit = true;
4281 }
4282 ChangeTag::Delete => {
4283 let start = snapshot.anchor_after(offset);
4284 let end = snapshot.anchor_before(offset + value.len());
4285 if moved_since_edit {
4286 edits.push((start..end, String::new()));
4287 } else {
4288 edits.last_mut().unwrap().0.end = end;
4289 }
4290 offset += value.len();
4291 moved_since_edit = false;
4292 }
4293 ChangeTag::Insert => {
4294 if moved_since_edit {
4295 let anchor = snapshot.anchor_after(offset);
4296 edits.push((anchor.clone()..anchor, value.to_string()));
4297 } else {
4298 edits.last_mut().unwrap().1.push_str(value);
4299 }
4300 moved_since_edit = false;
4301 }
4302 }
4303 }
4304 } else if range.end == range.start {
4305 let anchor = snapshot.anchor_after(range.start);
4306 edits.push((anchor.clone()..anchor, new_text));
4307 } else {
4308 let edit_start = snapshot.anchor_after(range.start);
4309 let edit_end = snapshot.anchor_before(range.end);
4310 edits.push((edit_start..edit_end, new_text));
4311 }
4312 }
4313
4314 Ok(edits)
4315 })
4316 }
4317
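/// Returns the buffer snapshot corresponding to an LSP document version,
/// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. With no
/// version, the buffer's current text is used.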
4318 fn buffer_snapshot_for_lsp_version(
4319 &mut self,
4320 buffer: &ModelHandle<Buffer>,
4321 version: Option<i32>,
4322 cx: &AppContext,
4323 ) -> Result<TextBufferSnapshot> {
4324 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4325
4326 if let Some(version) = version {
4327 let buffer_id = buffer.read(cx).remote_id();
4328 let snapshots = self
4329 .buffer_snapshots
4330 .get_mut(&buffer_id)
4331 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4332 let mut found_snapshot = None;
4333 snapshots.retain(|(snapshot_version, snapshot)| {
4334 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4335 false
4336 } else {
4337 if *snapshot_version == version {
4338 found_snapshot = Some(snapshot.clone());
4339 }
4340 true
4341 }
4342 });
4343
4344 found_snapshot.ok_or_else(|| {
4345 anyhow!(
4346 "snapshot not found for buffer {} at version {}",
4347 buffer_id,
4348 version
4349 )
4350 })
4351 } else {
4352 Ok(buffer.read(cx).text_snapshot())
4353 }
4354 }
4355
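/// Looks up the language server associated with a buffer's worktree and
/// language, if the buffer has both a project file and a language.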
4356 fn language_server_for_buffer(
4357 &self,
4358 buffer: &Buffer,
4359 cx: &AppContext,
4360 ) -> Option<&Arc<LanguageServer>> {
4361 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4362 let worktree_id = file.worktree_id(cx);
4363 self.language_servers.get(&(worktree_id, language.name()))
4364 } else {
4365 None
4366 }
4367 }
4368}
4369
4370impl WorktreeHandle {
4371 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4372 match self {
4373 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4374 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4375 }
4376 }
4377}
4378
4379impl OpenBuffer {
4380 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4381 match self {
4382 OpenBuffer::Strong(handle) => Some(handle.clone()),
4383 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4384 OpenBuffer::Loading(_) => None,
4385 }
4386 }
4387}
4388
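/// Adapts a worktree snapshot so its file paths can be fed to the fuzzy
/// matcher as a candidate set.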
4389struct CandidateSet {
4390 snapshot: Snapshot,
4391 include_ignored: bool,
4392 include_root_name: bool,
4393}
4394
4395impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4396 type Candidates = CandidateSetIter<'a>;
4397
4398 fn id(&self) -> usize {
4399 self.snapshot.id().to_usize()
4400 }
4401
4402 fn len(&self) -> usize {
4403 if self.include_ignored {
4404 self.snapshot.file_count()
4405 } else {
4406 self.snapshot.visible_file_count()
4407 }
4408 }
4409
4410 fn prefix(&self) -> Arc<str> {
4411 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4412 self.snapshot.root_name().into()
4413 } else if self.include_root_name {
4414 format!("{}/", self.snapshot.root_name()).into()
4415 } else {
4416 "".into()
4417 }
4418 }
4419
4420 fn candidates(&'a self, start: usize) -> Self::Candidates {
4421 CandidateSetIter {
4422 traversal: self.snapshot.files(self.include_ignored, start),
4423 }
4424 }
4425}
4426
4427struct CandidateSetIter<'a> {
4428 traversal: Traversal<'a>,
4429}
4430
4431impl<'a> Iterator for CandidateSetIter<'a> {
4432 type Item = PathMatchCandidate<'a>;
4433
4434 fn next(&mut self) -> Option<Self::Item> {
4435 self.traversal.next().map(|entry| {
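// The traversal yields file entries only, so every entry is expected to
// have a `File` kind.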
4436 if let EntryKind::File(char_bag) = entry.kind {
4437 PathMatchCandidate {
4438 path: &entry.path,
4439 char_bag,
4440 }
4441 } else {
4442 unreachable!()
4443 }
4444 })
4445 }
4446}
4447
4448impl Entity for Project {
4449 type Event = Event;
4450
4451 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4452 match &self.client_state {
4453 ProjectClientState::Local { remote_id_rx, .. } => {
4454 if let Some(project_id) = *remote_id_rx.borrow() {
4455 self.client
4456 .send(proto::UnregisterProject { project_id })
4457 .log_err();
4458 }
4459 }
4460 ProjectClientState::Remote { remote_id, .. } => {
4461 self.client
4462 .send(proto::LeaveProject {
4463 project_id: *remote_id,
4464 })
4465 .log_err();
4466 }
4467 }
4468 }
4469
4470 fn app_will_quit(
4471 &mut self,
4472 _: &mut MutableAppContext,
4473 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4474 let shutdown_futures = self
4475 .language_servers
4476 .drain()
4477 .filter_map(|(_, server)| server.shutdown())
4478 .collect::<Vec<_>>();
4479 Some(
4480 async move {
4481 futures::future::join_all(shutdown_futures).await;
4482 }
4483 .boxed(),
4484 )
4485 }
4486}
4487
4488impl Collaborator {
4489 fn from_proto(
4490 message: proto::Collaborator,
4491 user_store: &ModelHandle<UserStore>,
4492 cx: &mut AsyncAppContext,
4493 ) -> impl Future<Output = Result<Self>> {
4494 let user = user_store.update(cx, |user_store, cx| {
4495 user_store.fetch_user(message.user_id, cx)
4496 });
4497
4498 async move {
4499 Ok(Self {
4500 peer_id: PeerId(message.peer_id),
4501 user: user.await?,
4502 replica_id: message.replica_id as ReplicaId,
4503 })
4504 }
4505 }
4506}
4507
4508impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4509 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4510 Self {
4511 worktree_id,
4512 path: path.as_ref().into(),
4513 }
4514 }
4515}
4516
4517impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4518 fn from(options: lsp::CreateFileOptions) -> Self {
4519 Self {
4520 overwrite: options.overwrite.unwrap_or(false),
4521 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4522 }
4523 }
4524}
4525
4526impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4527 fn from(options: lsp::RenameFileOptions) -> Self {
4528 Self {
4529 overwrite: options.overwrite.unwrap_or(false),
4530 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4531 }
4532 }
4533}
4534
4535impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4536 fn from(options: lsp::DeleteFileOptions) -> Self {
4537 Self {
4538 recursive: options.recursive.unwrap_or(false),
4539 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4540 }
4541 }
4542}
4543
4544fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4545 proto::Symbol {
4546 source_worktree_id: symbol.source_worktree_id.to_proto(),
4547 worktree_id: symbol.worktree_id.to_proto(),
4548 language_name: symbol.language_name.clone(),
4549 name: symbol.name.clone(),
4550 kind: unsafe { mem::transmute(symbol.kind) },
4551 path: symbol.path.to_string_lossy().to_string(),
4552 start: Some(proto::Point {
4553 row: symbol.range.start.row,
4554 column: symbol.range.start.column,
4555 }),
4556 end: Some(proto::Point {
4557 row: symbol.range.end.row,
4558 column: symbol.range.end.column,
4559 }),
4560 signature: symbol.signature.to_vec(),
4561 }
4562}
4563
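/// Computes the path of `path` relative to `base` purely lexically, without
/// touching the filesystem. For example (illustrative), a base of "/a/b" and
/// a path of "/a/c/d" yields "../c/d".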
4564fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4565 let mut path_components = path.components();
4566 let mut base_components = base.components();
4567 let mut components: Vec<Component> = Vec::new();
4568 loop {
4569 match (path_components.next(), base_components.next()) {
4570 (None, None) => break,
4571 (Some(a), None) => {
4572 components.push(a);
4573 components.extend(path_components.by_ref());
4574 break;
4575 }
4576 (None, _) => components.push(Component::ParentDir),
4577 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4578 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4579 (Some(a), Some(_)) => {
4580 components.push(Component::ParentDir);
4581 for _ in base_components {
4582 components.push(Component::ParentDir);
4583 }
4584 components.push(a);
4585 components.extend(path_components.by_ref());
4586 break;
4587 }
4588 }
4589 }
4590 components.iter().map(|c| c.as_os_str()).collect()
4591}
4592
4593impl Item for Buffer {
4594 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4595 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4596 }
4597}
4598
4599#[cfg(test)]
4600mod tests {
4601 use super::{Event, *};
4602 use fs::RealFs;
4603 use futures::StreamExt;
4604 use gpui::test::subscribe;
4605 use language::{
4606 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4607 ToPoint,
4608 };
4609 use lsp::Url;
4610 use serde_json::json;
4611 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4612 use unindent::Unindent as _;
4613 use util::test::temp_tree;
4614 use worktree::WorktreeHandle as _;
4615
4616 #[gpui::test]
4617 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4618 let dir = temp_tree(json!({
4619 "root": {
4620 "apple": "",
4621 "banana": {
4622 "carrot": {
4623 "date": "",
4624 "endive": "",
4625 }
4626 },
4627 "fennel": {
4628 "grape": "",
4629 }
4630 }
4631 }));
4632
4633 let root_link_path = dir.path().join("root_link");
4634 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4635 unix::fs::symlink(
4636 &dir.path().join("root/fennel"),
4637 &dir.path().join("root/finnochio"),
4638 )
4639 .unwrap();
4640
4641 let project = Project::test(Arc::new(RealFs), cx);
4642
4643 let (tree, _) = project
4644 .update(cx, |project, cx| {
4645 project.find_or_create_local_worktree(&root_link_path, true, cx)
4646 })
4647 .await
4648 .unwrap();
4649
4650 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4651 .await;
4652 cx.read(|cx| {
4653 let tree = tree.read(cx);
4654 assert_eq!(tree.file_count(), 5);
4655 assert_eq!(
4656 tree.inode_for_path("fennel/grape"),
4657 tree.inode_for_path("finnochio/grape")
4658 );
4659 });
4660
4661 let cancel_flag = Default::default();
4662 let results = project
4663 .read_with(cx, |project, cx| {
4664 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4665 })
4666 .await;
4667 assert_eq!(
4668 results
4669 .into_iter()
4670 .map(|result| result.path)
4671 .collect::<Vec<Arc<Path>>>(),
4672 vec![
4673 PathBuf::from("banana/carrot/date").into(),
4674 PathBuf::from("banana/carrot/endive").into(),
4675 ]
4676 );
4677 }
4678
4679 #[gpui::test]
4680 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4681 cx.foreground().forbid_parking();
4682
4683 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4684 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4685 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4686 completion_provider: Some(lsp::CompletionOptions {
4687 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4688 ..Default::default()
4689 }),
4690 ..Default::default()
4691 });
4692 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4693 completion_provider: Some(lsp::CompletionOptions {
4694 trigger_characters: Some(vec![":".to_string()]),
4695 ..Default::default()
4696 }),
4697 ..Default::default()
4698 });
4699
4700 let rust_language = Arc::new(Language::new(
4701 LanguageConfig {
4702 name: "Rust".into(),
4703 path_suffixes: vec!["rs".to_string()],
4704 language_server: Some(rust_lsp_config),
4705 ..Default::default()
4706 },
4707 Some(tree_sitter_rust::language()),
4708 ));
4709 let json_language = Arc::new(Language::new(
4710 LanguageConfig {
4711 name: "JSON".into(),
4712 path_suffixes: vec!["json".to_string()],
4713 language_server: Some(json_lsp_config),
4714 ..Default::default()
4715 },
4716 None,
4717 ));
4718
4719 let fs = FakeFs::new(cx.background());
4720 fs.insert_tree(
4721 "/the-root",
4722 json!({
4723 "test.rs": "const A: i32 = 1;",
4724 "test2.rs": "",
4725 "Cargo.toml": "a = 1",
4726 "package.json": "{\"a\": 1}",
4727 }),
4728 )
4729 .await;
4730
4731 let project = Project::test(fs, cx);
4732 project.update(cx, |project, _| {
4733 project.languages.add(rust_language);
4734 project.languages.add(json_language);
4735 });
4736
4737 let worktree_id = project
4738 .update(cx, |project, cx| {
4739 project.find_or_create_local_worktree("/the-root", true, cx)
4740 })
4741 .await
4742 .unwrap()
4743 .0
4744 .read_with(cx, |tree, _| tree.id());
4745
4746 // Open a buffer without an associated language server.
4747 let toml_buffer = project
4748 .update(cx, |project, cx| {
4749 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4750 })
4751 .await
4752 .unwrap();
4753
4754 // Open a buffer with an associated language server.
4755 let rust_buffer = project
4756 .update(cx, |project, cx| {
4757 project.open_buffer((worktree_id, "test.rs"), cx)
4758 })
4759 .await
4760 .unwrap();
4761
4762 // A server is started up, and it is notified about Rust files.
4763 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4764 assert_eq!(
4765 fake_rust_server
4766 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4767 .await
4768 .text_document,
4769 lsp::TextDocumentItem {
4770 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4771 version: 0,
4772 text: "const A: i32 = 1;".to_string(),
4773 language_id: Default::default()
4774 }
4775 );
4776
4777 // The buffer is configured based on the language server's capabilities.
4778 rust_buffer.read_with(cx, |buffer, _| {
4779 assert_eq!(
4780 buffer.completion_triggers(),
4781 &[".".to_string(), "::".to_string()]
4782 );
4783 });
4784 toml_buffer.read_with(cx, |buffer, _| {
4785 assert!(buffer.completion_triggers().is_empty());
4786 });
4787
4788 // Edit a buffer. The changes are reported to the language server.
4789 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4790 assert_eq!(
4791 fake_rust_server
4792 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4793 .await
4794 .text_document,
4795 lsp::VersionedTextDocumentIdentifier::new(
4796 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4797 1
4798 )
4799 );
4800
4801 // Open a third buffer with a different associated language server.
4802 let json_buffer = project
4803 .update(cx, |project, cx| {
4804 project.open_buffer((worktree_id, "package.json"), cx)
4805 })
4806 .await
4807 .unwrap();
4808
4809 // Another language server is started up, and it is notified about
4810 // the newly opened buffer that matches its language.
4811 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4812 assert_eq!(
4813 fake_json_server
4814 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4815 .await
4816 .text_document,
4817 lsp::TextDocumentItem {
4818 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4819 version: 0,
4820 text: "{\"a\": 1}".to_string(),
4821 language_id: Default::default()
4822 }
4823 );
4824
4825 // This buffer is configured based on the second language server's
4826 // capabilities.
4827 json_buffer.read_with(cx, |buffer, _| {
4828 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4829 });
4830
4831 // When opening another buffer whose language server is already running,
4832 // it is also configured based on the existing language server's capabilities.
4833 let rust_buffer2 = project
4834 .update(cx, |project, cx| {
4835 project.open_buffer((worktree_id, "test2.rs"), cx)
4836 })
4837 .await
4838 .unwrap();
4839 rust_buffer2.read_with(cx, |buffer, _| {
4840 assert_eq!(
4841 buffer.completion_triggers(),
4842 &[".".to_string(), "::".to_string()]
4843 );
4844 });
4845
4846 // Changes are reported only to servers matching the buffer's language.
4847 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4848 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4849 assert_eq!(
4850 fake_rust_server
4851 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4852 .await
4853 .text_document,
4854 lsp::VersionedTextDocumentIdentifier::new(
4855 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4856 1
4857 )
4858 );
4859
4860 // Save notifications are reported to all servers.
4861 toml_buffer
4862 .update(cx, |buffer, cx| buffer.save(cx))
4863 .await
4864 .unwrap();
4865 assert_eq!(
4866 fake_rust_server
4867 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4868 .await
4869 .text_document,
4870 lsp::TextDocumentIdentifier::new(
4871 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4872 )
4873 );
4874 assert_eq!(
4875 fake_json_server
4876 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4877 .await
4878 .text_document,
4879 lsp::TextDocumentIdentifier::new(
4880 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4881 )
4882 );
4883
4884 // Close notifications are reported only to servers matching the buffer's language.
4885 cx.update(|_| drop(json_buffer));
4886 let close_message = lsp::DidCloseTextDocumentParams {
4887 text_document: lsp::TextDocumentIdentifier::new(
4888 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4889 ),
4890 };
4891 assert_eq!(
4892 fake_json_server
4893 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4894 .await,
4895 close_message,
4896 );
4897 }
4898
4899 #[gpui::test]
4900 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4901 cx.foreground().forbid_parking();
4902
4903 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4904 let progress_token = language_server_config
4905 .disk_based_diagnostics_progress_token
4906 .clone()
4907 .unwrap();
4908
4909 let language = Arc::new(Language::new(
4910 LanguageConfig {
4911 name: "Rust".into(),
4912 path_suffixes: vec!["rs".to_string()],
4913 language_server: Some(language_server_config),
4914 ..Default::default()
4915 },
4916 Some(tree_sitter_rust::language()),
4917 ));
4918
4919 let fs = FakeFs::new(cx.background());
4920 fs.insert_tree(
4921 "/dir",
4922 json!({
4923 "a.rs": "fn a() { A }",
4924 "b.rs": "const y: i32 = 1",
4925 }),
4926 )
4927 .await;
4928
4929 let project = Project::test(fs, cx);
4930 project.update(cx, |project, _| project.languages.add(language));
4931
4932 let (tree, _) = project
4933 .update(cx, |project, cx| {
4934 project.find_or_create_local_worktree("/dir", true, cx)
4935 })
4936 .await
4937 .unwrap();
4938 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4939
4940 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4941 .await;
4942
4943 // Cause the worktree to start the fake language server
4944 let _buffer = project
4945 .update(cx, |project, cx| {
4946 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4947 })
4948 .await
4949 .unwrap();
4950
4951 let mut events = subscribe(&project, cx);
4952
4953 let mut fake_server = fake_servers.next().await.unwrap();
4954 fake_server.start_progress(&progress_token).await;
4955 assert_eq!(
4956 events.next().await.unwrap(),
4957 Event::DiskBasedDiagnosticsStarted
4958 );
4959
4960 fake_server.start_progress(&progress_token).await;
4961 fake_server.end_progress(&progress_token).await;
4962 fake_server.start_progress(&progress_token).await;
4963
4964 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4965 lsp::PublishDiagnosticsParams {
4966 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4967 version: None,
4968 diagnostics: vec![lsp::Diagnostic {
4969 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4970 severity: Some(lsp::DiagnosticSeverity::ERROR),
4971 message: "undefined variable 'A'".to_string(),
4972 ..Default::default()
4973 }],
4974 },
4975 );
4976 assert_eq!(
4977 events.next().await.unwrap(),
4978 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4979 );
4980
4981 fake_server.end_progress(&progress_token).await;
4982 fake_server.end_progress(&progress_token).await;
4983 assert_eq!(
4984 events.next().await.unwrap(),
4985 Event::DiskBasedDiagnosticsUpdated
4986 );
4987 assert_eq!(
4988 events.next().await.unwrap(),
4989 Event::DiskBasedDiagnosticsFinished
4990 );
4991
4992 let buffer = project
4993 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4994 .await
4995 .unwrap();
4996
4997 buffer.read_with(cx, |buffer, _| {
4998 let snapshot = buffer.snapshot();
4999 let diagnostics = snapshot
5000 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5001 .collect::<Vec<_>>();
5002 assert_eq!(
5003 diagnostics,
5004 &[DiagnosticEntry {
5005 range: Point::new(0, 9)..Point::new(0, 10),
5006 diagnostic: Diagnostic {
5007 severity: lsp::DiagnosticSeverity::ERROR,
5008 message: "undefined variable 'A'".to_string(),
5009 group_id: 0,
5010 is_primary: true,
5011 ..Default::default()
5012 }
5013 }]
5014 )
5015 });
5016 }
5017
5018 #[gpui::test]
5019 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
5020 cx.foreground().forbid_parking();
5021
5022 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5023 lsp_config
5024 .disk_based_diagnostic_sources
5025 .insert("disk".to_string());
5026 let language = Arc::new(Language::new(
5027 LanguageConfig {
5028 name: "Rust".into(),
5029 path_suffixes: vec!["rs".to_string()],
5030 language_server: Some(lsp_config),
5031 ..Default::default()
5032 },
5033 Some(tree_sitter_rust::language()),
5034 ));
5035
5036 let text = "
5037 fn a() { A }
5038 fn b() { BB }
5039 fn c() { CCC }
5040 "
5041 .unindent();
5042
5043 let fs = FakeFs::new(cx.background());
5044 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5045
5046 let project = Project::test(fs, cx);
5047 project.update(cx, |project, _| project.languages.add(language));
5048
5049 let worktree_id = project
5050 .update(cx, |project, cx| {
5051 project.find_or_create_local_worktree("/dir", true, cx)
5052 })
5053 .await
5054 .unwrap()
5055 .0
5056 .read_with(cx, |tree, _| tree.id());
5057
5058 let buffer = project
5059 .update(cx, |project, cx| {
5060 project.open_buffer((worktree_id, "a.rs"), cx)
5061 })
5062 .await
5063 .unwrap();
5064
5065 let mut fake_server = fake_servers.next().await.unwrap();
5066 let open_notification = fake_server
5067 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5068 .await;
5069
5070 // Edit the buffer, moving the content down
5071 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5072 let change_notification_1 = fake_server
5073 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5074 .await;
5075 assert!(
5076 change_notification_1.text_document.version > open_notification.text_document.version
5077 );
5078
5079 // Report some diagnostics for the initial version of the buffer
5080 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5081 lsp::PublishDiagnosticsParams {
5082 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5083 version: Some(open_notification.text_document.version),
5084 diagnostics: vec![
5085 lsp::Diagnostic {
5086 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5087 severity: Some(DiagnosticSeverity::ERROR),
5088 message: "undefined variable 'A'".to_string(),
5089 source: Some("disk".to_string()),
5090 ..Default::default()
5091 },
5092 lsp::Diagnostic {
5093 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5094 severity: Some(DiagnosticSeverity::ERROR),
5095 message: "undefined variable 'BB'".to_string(),
5096 source: Some("disk".to_string()),
5097 ..Default::default()
5098 },
5099 lsp::Diagnostic {
5100 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5101 severity: Some(DiagnosticSeverity::ERROR),
5102 source: Some("disk".to_string()),
5103 message: "undefined variable 'CCC'".to_string(),
5104 ..Default::default()
5105 },
5106 ],
5107 },
5108 );
5109
5110 // The diagnostics have moved down since they were created.
5111 buffer.next_notification(cx).await;
5112 buffer.read_with(cx, |buffer, _| {
5113 assert_eq!(
5114 buffer
5115 .snapshot()
5116 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5117 .collect::<Vec<_>>(),
5118 &[
5119 DiagnosticEntry {
5120 range: Point::new(3, 9)..Point::new(3, 11),
5121 diagnostic: Diagnostic {
5122 severity: DiagnosticSeverity::ERROR,
5123 message: "undefined variable 'BB'".to_string(),
5124 is_disk_based: true,
5125 group_id: 1,
5126 is_primary: true,
5127 ..Default::default()
5128 },
5129 },
5130 DiagnosticEntry {
5131 range: Point::new(4, 9)..Point::new(4, 12),
5132 diagnostic: Diagnostic {
5133 severity: DiagnosticSeverity::ERROR,
5134 message: "undefined variable 'CCC'".to_string(),
5135 is_disk_based: true,
5136 group_id: 2,
5137 is_primary: true,
5138 ..Default::default()
5139 }
5140 }
5141 ]
5142 );
5143 assert_eq!(
5144 chunks_with_diagnostics(buffer, 0..buffer.len()),
5145 [
5146 ("\n\nfn a() { ".to_string(), None),
5147 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5148 (" }\nfn b() { ".to_string(), None),
5149 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5150 (" }\nfn c() { ".to_string(), None),
5151 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5152 (" }\n".to_string(), None),
5153 ]
5154 );
5155 assert_eq!(
5156 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5157 [
5158 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5159 (" }\nfn c() { ".to_string(), None),
5160 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5161 ]
5162 );
5163 });
5164
5165 // Ensure overlapping diagnostics are highlighted correctly.
5166 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5167 lsp::PublishDiagnosticsParams {
5168 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5169 version: Some(open_notification.text_document.version),
5170 diagnostics: vec![
5171 lsp::Diagnostic {
5172 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5173 severity: Some(DiagnosticSeverity::ERROR),
5174 message: "undefined variable 'A'".to_string(),
5175 source: Some("disk".to_string()),
5176 ..Default::default()
5177 },
5178 lsp::Diagnostic {
5179 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5180 severity: Some(DiagnosticSeverity::WARNING),
5181 message: "unreachable statement".to_string(),
5182 source: Some("disk".to_string()),
5183 ..Default::default()
5184 },
5185 ],
5186 },
5187 );
5188
5189 buffer.next_notification(cx).await;
5190 buffer.read_with(cx, |buffer, _| {
5191 assert_eq!(
5192 buffer
5193 .snapshot()
5194 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5195 .collect::<Vec<_>>(),
5196 &[
5197 DiagnosticEntry {
5198 range: Point::new(2, 9)..Point::new(2, 12),
5199 diagnostic: Diagnostic {
5200 severity: DiagnosticSeverity::WARNING,
5201 message: "unreachable statement".to_string(),
5202 is_disk_based: true,
5203 group_id: 1,
5204 is_primary: true,
5205 ..Default::default()
5206 }
5207 },
5208 DiagnosticEntry {
5209 range: Point::new(2, 9)..Point::new(2, 10),
5210 diagnostic: Diagnostic {
5211 severity: DiagnosticSeverity::ERROR,
5212 message: "undefined variable 'A'".to_string(),
5213 is_disk_based: true,
5214 group_id: 0,
5215 is_primary: true,
5216 ..Default::default()
5217 },
5218 }
5219 ]
5220 );
5221 assert_eq!(
5222 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5223 [
5224 ("fn a() { ".to_string(), None),
5225 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5226 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5227 ("\n".to_string(), None),
5228 ]
5229 );
5230 assert_eq!(
5231 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5232 [
5233 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5234 ("\n".to_string(), None),
5235 ]
5236 );
5237 });
5238
5239 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5240 // changes since the last save.
5241 buffer.update(cx, |buffer, cx| {
5242 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5243 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5244 });
5245 let change_notification_2 =
5246 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5247 assert!(
5248 change_notification_2.await.text_document.version
5249 > change_notification_1.text_document.version
5250 );
5251
5252 // Handle out-of-order diagnostics
5253 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5254 lsp::PublishDiagnosticsParams {
5255 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5256 version: Some(open_notification.text_document.version),
5257 diagnostics: vec![
5258 lsp::Diagnostic {
5259 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5260 severity: Some(DiagnosticSeverity::ERROR),
5261 message: "undefined variable 'BB'".to_string(),
5262 source: Some("disk".to_string()),
5263 ..Default::default()
5264 },
5265 lsp::Diagnostic {
5266 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5267 severity: Some(DiagnosticSeverity::WARNING),
5268 message: "undefined variable 'A'".to_string(),
5269 source: Some("disk".to_string()),
5270 ..Default::default()
5271 },
5272 ],
5273 },
5274 );
5275
5276 buffer.next_notification(cx).await;
5277 buffer.read_with(cx, |buffer, _| {
5278 assert_eq!(
5279 buffer
5280 .snapshot()
5281 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5282 .collect::<Vec<_>>(),
5283 &[
5284 DiagnosticEntry {
5285 range: Point::new(2, 21)..Point::new(2, 22),
5286 diagnostic: Diagnostic {
5287 severity: DiagnosticSeverity::WARNING,
5288 message: "undefined variable 'A'".to_string(),
5289 is_disk_based: true,
5290 group_id: 1,
5291 is_primary: true,
5292 ..Default::default()
5293 }
5294 },
5295 DiagnosticEntry {
5296 range: Point::new(3, 9)..Point::new(3, 11),
5297 diagnostic: Diagnostic {
5298 severity: DiagnosticSeverity::ERROR,
5299 message: "undefined variable 'BB'".to_string(),
5300 is_disk_based: true,
5301 group_id: 0,
5302 is_primary: true,
5303 ..Default::default()
5304 },
5305 }
5306 ]
5307 );
5308 });
5309 }
5310
5311 #[gpui::test]
5312 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5313 cx.foreground().forbid_parking();
5314
5315 let text = concat!(
5316 "let one = ;\n", //
5317 "let two = \n",
5318 "let three = 3;\n",
5319 );
5320
5321 let fs = FakeFs::new(cx.background());
5322 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5323
5324 let project = Project::test(fs, cx);
5325 let worktree_id = project
5326 .update(cx, |project, cx| {
5327 project.find_or_create_local_worktree("/dir", true, cx)
5328 })
5329 .await
5330 .unwrap()
5331 .0
5332 .read_with(cx, |tree, _| tree.id());
5333
5334 let buffer = project
5335 .update(cx, |project, cx| {
5336 project.open_buffer((worktree_id, "a.rs"), cx)
5337 })
5338 .await
5339 .unwrap();
5340
5341 project.update(cx, |project, cx| {
5342 project
5343 .update_buffer_diagnostics(
5344 &buffer,
5345 vec![
5346 DiagnosticEntry {
5347 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5348 diagnostic: Diagnostic {
5349 severity: DiagnosticSeverity::ERROR,
5350 message: "syntax error 1".to_string(),
5351 ..Default::default()
5352 },
5353 },
5354 DiagnosticEntry {
5355 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5356 diagnostic: Diagnostic {
5357 severity: DiagnosticSeverity::ERROR,
5358 message: "syntax error 2".to_string(),
5359 ..Default::default()
5360 },
5361 },
5362 ],
5363 None,
5364 cx,
5365 )
5366 .unwrap();
5367 });
5368
5369 // An empty range is extended forward to include the following character.
5370 // At the end of a line, an empty range is extended backward to include
5371 // the preceding character.
5372 buffer.read_with(cx, |buffer, _| {
5373 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5374 assert_eq!(
5375 chunks
5376 .iter()
5377 .map(|(s, d)| (s.as_str(), *d))
5378 .collect::<Vec<_>>(),
5379 &[
5380 ("let one = ", None),
5381 (";", Some(DiagnosticSeverity::ERROR)),
5382 ("\nlet two =", None),
5383 (" ", Some(DiagnosticSeverity::ERROR)),
5384 ("\nlet three = 3;\n", None)
5385 ]
5386 );
5387 });
5388 }
5389
5390 #[gpui::test]
5391 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5392 cx.foreground().forbid_parking();
5393
5394 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5395 let language = Arc::new(Language::new(
5396 LanguageConfig {
5397 name: "Rust".into(),
5398 path_suffixes: vec!["rs".to_string()],
5399 language_server: Some(lsp_config),
5400 ..Default::default()
5401 },
5402 Some(tree_sitter_rust::language()),
5403 ));
5404
5405 let text = "
5406 fn a() {
5407 f1();
5408 }
5409 fn b() {
5410 f2();
5411 }
5412 fn c() {
5413 f3();
5414 }
5415 "
5416 .unindent();
5417
5418 let fs = FakeFs::new(cx.background());
5419 fs.insert_tree(
5420 "/dir",
5421 json!({
5422 "a.rs": text.clone(),
5423 }),
5424 )
5425 .await;
5426
5427 let project = Project::test(fs, cx);
5428 project.update(cx, |project, _| project.languages.add(language));
5429
5430 let worktree_id = project
5431 .update(cx, |project, cx| {
5432 project.find_or_create_local_worktree("/dir", true, cx)
5433 })
5434 .await
5435 .unwrap()
5436 .0
5437 .read_with(cx, |tree, _| tree.id());
5438
5439 let buffer = project
5440 .update(cx, |project, cx| {
5441 project.open_buffer((worktree_id, "a.rs"), cx)
5442 })
5443 .await
5444 .unwrap();
5445
5446 let mut fake_server = fake_servers.next().await.unwrap();
5447 let lsp_document_version = fake_server
5448 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5449 .await
5450 .text_document
5451 .version;
5452
5453 // Simulate editing the buffer after the language server computes some edits.
5454 buffer.update(cx, |buffer, cx| {
5455 buffer.edit(
5456 [Point::new(0, 0)..Point::new(0, 0)],
5457 "// above first function\n",
5458 cx,
5459 );
5460 buffer.edit(
5461 [Point::new(2, 0)..Point::new(2, 0)],
5462 " // inside first function\n",
5463 cx,
5464 );
5465 buffer.edit(
5466 [Point::new(6, 4)..Point::new(6, 4)],
5467 "// inside second function ",
5468 cx,
5469 );
5470
5471 assert_eq!(
5472 buffer.text(),
5473 "
5474 // above first function
5475 fn a() {
5476 // inside first function
5477 f1();
5478 }
5479 fn b() {
5480 // inside second function f2();
5481 }
5482 fn c() {
5483 f3();
5484 }
5485 "
5486 .unindent()
5487 );
5488 });
5489
5490 let edits = project
5491 .update(cx, |project, cx| {
5492 project.edits_from_lsp(
5493 &buffer,
5494 vec![
5495 // replace body of first function
5496 lsp::TextEdit {
5497 range: lsp::Range::new(
5498 lsp::Position::new(0, 0),
5499 lsp::Position::new(3, 0),
5500 ),
5501 new_text: "
5502 fn a() {
5503 f10();
5504 }
5505 "
5506 .unindent(),
5507 },
5508 // edit inside second function
5509 lsp::TextEdit {
5510 range: lsp::Range::new(
5511 lsp::Position::new(4, 6),
5512 lsp::Position::new(4, 6),
5513 ),
5514 new_text: "00".into(),
5515 },
5516 // edit inside third function via two distinct edits
5517 lsp::TextEdit {
5518 range: lsp::Range::new(
5519 lsp::Position::new(7, 5),
5520 lsp::Position::new(7, 5),
5521 ),
5522 new_text: "4000".into(),
5523 },
5524 lsp::TextEdit {
5525 range: lsp::Range::new(
5526 lsp::Position::new(7, 5),
5527 lsp::Position::new(7, 6),
5528 ),
5529 new_text: "".into(),
5530 },
5531 ],
5532 Some(lsp_document_version),
5533 cx,
5534 )
5535 })
5536 .await
5537 .unwrap();
5538
5539 buffer.update(cx, |buffer, cx| {
5540 for (range, new_text) in edits {
5541 buffer.edit([range], new_text, cx);
5542 }
5543 assert_eq!(
5544 buffer.text(),
5545 "
5546 // above first function
5547 fn a() {
5548 // inside first function
5549 f10();
5550 }
5551 fn b() {
5552 // inside second function f200();
5553 }
5554 fn c() {
5555 f4000();
5556 }
5557 "
5558 .unindent()
5559 );
5560 });
5561 }
5562
5563 #[gpui::test]
5564 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5565 cx.foreground().forbid_parking();
5566
5567 let text = "
5568 use a::b;
5569 use a::c;
5570
5571 fn f() {
5572 b();
5573 c();
5574 }
5575 "
5576 .unindent();
5577
5578 let fs = FakeFs::new(cx.background());
5579 fs.insert_tree(
5580 "/dir",
5581 json!({
5582 "a.rs": text.clone(),
5583 }),
5584 )
5585 .await;
5586
5587 let project = Project::test(fs, cx);
5588 let worktree_id = project
5589 .update(cx, |project, cx| {
5590 project.find_or_create_local_worktree("/dir", true, cx)
5591 })
5592 .await
5593 .unwrap()
5594 .0
5595 .read_with(cx, |tree, _| tree.id());
5596
5597 let buffer = project
5598 .update(cx, |project, cx| {
5599 project.open_buffer((worktree_id, "a.rs"), cx)
5600 })
5601 .await
5602 .unwrap();
5603
5604 // Simulate the language server sending us a small edit in the form of a very large diff.
5605 // Rust-analyzer does this when performing a merge-imports code action.
5606 let edits = project
5607 .update(cx, |project, cx| {
5608 project.edits_from_lsp(
5609 &buffer,
5610 [
5611 // Replace the first use statement without editing the semicolon.
5612 lsp::TextEdit {
5613 range: lsp::Range::new(
5614 lsp::Position::new(0, 4),
5615 lsp::Position::new(0, 8),
5616 ),
5617 new_text: "a::{b, c}".into(),
5618 },
5619 // Reinsert the remainder of the file between the semicolon and the final
5620 // newline of the file.
5621 lsp::TextEdit {
5622 range: lsp::Range::new(
5623 lsp::Position::new(0, 9),
5624 lsp::Position::new(0, 9),
5625 ),
5626 new_text: "\n\n".into(),
5627 },
5628 lsp::TextEdit {
5629 range: lsp::Range::new(
5630 lsp::Position::new(0, 9),
5631 lsp::Position::new(0, 9),
5632 ),
5633 new_text: "
5634 fn f() {
5635 b();
5636 c();
5637 }"
5638 .unindent(),
5639 },
5640 // Delete everything after the first newline of the file.
5641 lsp::TextEdit {
5642 range: lsp::Range::new(
5643 lsp::Position::new(1, 0),
5644 lsp::Position::new(7, 0),
5645 ),
5646 new_text: "".into(),
5647 },
5648 ],
5649 None,
5650 cx,
5651 )
5652 })
5653 .await
5654 .unwrap();
5655
5656 buffer.update(cx, |buffer, cx| {
5657 let edits = edits
5658 .into_iter()
5659 .map(|(range, text)| {
5660 (
5661 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5662 text,
5663 )
5664 })
5665 .collect::<Vec<_>>();
5666
5667 assert_eq!(
5668 edits,
5669 [
5670 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5671 (Point::new(1, 0)..Point::new(2, 0), "".into())
5672 ]
5673 );
5674
5675 for (range, new_text) in edits {
5676 buffer.edit([range], new_text, cx);
5677 }
5678 assert_eq!(
5679 buffer.text(),
5680 "
5681 use a::{b, c};
5682
5683 fn f() {
5684 b();
5685 c();
5686 }
5687 "
5688 .unindent()
5689 );
5690 });
5691 }
5692
5693 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5694 buffer: &Buffer,
5695 range: Range<T>,
5696 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5697 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5698 for chunk in buffer.snapshot().chunks(range, true) {
5699 if chunks.last().map_or(false, |prev_chunk| {
5700 prev_chunk.1 == chunk.diagnostic_severity
5701 }) {
5702 chunks.last_mut().unwrap().0.push_str(chunk.text);
5703 } else {
5704 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5705 }
5706 }
5707 chunks
5708 }
5709
5710 #[gpui::test]
5711 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5712 let dir = temp_tree(json!({
5713 "root": {
5714 "dir1": {},
5715 "dir2": {
5716 "dir3": {}
5717 }
5718 }
5719 }));
5720
5721 let project = Project::test(Arc::new(RealFs), cx);
5722 let (tree, _) = project
5723 .update(cx, |project, cx| {
5724 project.find_or_create_local_worktree(&dir.path(), true, cx)
5725 })
5726 .await
5727 .unwrap();
5728
5729 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5730 .await;
5731
5732 let cancel_flag = Default::default();
5733 let results = project
5734 .read_with(cx, |project, cx| {
5735 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5736 })
5737 .await;
5738
5739 assert!(results.is_empty());
5740 }
5741
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir/b.rs", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }

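    // Saving a dirty buffer should write its contents back to the underlying file.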
    #[gpui::test]
    async fn test_save_file(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let worktree_id = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        buffer
            .update(cx, |buffer, cx| {
                assert_eq!(buffer.text(), "the old contents");
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
    }

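    // Saving should also work when the worktree root is a single file rather than a directory.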
    #[gpui::test]
    async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "the old contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let worktree_id = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/dir/file1", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
            .await
            .unwrap();
        buffer
            .update(cx, |buffer, cx| {
                buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
                buffer.save(cx)
            })
            .await
            .unwrap();

        let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
        assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
    }

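    // "Save as" should assign a file to an unsaved buffer, clear its dirty state, and
    // reuse that buffer when the same path is opened again.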
    #[gpui::test]
    async fn test_save_as(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({})).await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
        buffer.update(cx, |buffer, cx| {
            buffer.edit([0..0], "abc", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        project
            .update(cx, |project, cx| {
                project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
            })
            .await
            .unwrap();
        assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
        buffer.read_with(cx, |buffer, cx| {
            assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        let opened_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "file1"), cx)
            })
            .await
            .unwrap();
        assert_eq!(opened_buffer, buffer);
    }

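    // Renames and deletions on disk should preserve entry ids, update open buffers' paths,
    // and be reproducible on a remote worktree by applying an update message.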
    #[gpui::test(retries = 5)]
    async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "a": {
                "file1": "",
                "file2": "",
                "file3": "",
            },
            "b": {
                "c": {
                    "file4": "",
                    "file5": "",
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let rpc = project.read_with(cx, |p, _| p.client.clone());

        let (tree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
            let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
            async move { buffer.await.unwrap() }
        };
        let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
            tree.read_with(cx, |tree, _| {
                tree.entry_for_path(path)
                    .expect(&format!("no entry for path {}", path))
                    .id
            })
        };

        let buffer2 = buffer_for_path("a/file2", cx).await;
        let buffer3 = buffer_for_path("a/file3", cx).await;
        let buffer4 = buffer_for_path("b/c/file4", cx).await;
        let buffer5 = buffer_for_path("b/c/file5", cx).await;

        let file2_id = id_for_path("a/file2", &cx);
        let file3_id = id_for_path("a/file3", &cx);
        let file4_id = id_for_path("b/c/file4", &cx);

        // Wait for the initial scan.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Create a remote copy of this worktree.
        let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        let (remote, load_task) = cx.update(|cx| {
            Worktree::remote(
                1,
                1,
                initial_snapshot.to_proto(&Default::default(), true),
                rpc.clone(),
                cx,
            )
        });
        load_task.await;

        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(cx, |remote, cx| {
            let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
                &initial_snapshot,
                1,
                1,
                true,
            );
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }

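    // Opening the same path several times, even concurrently, should always resolve to a
    // single shared buffer.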
    #[gpui::test]
    async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.txt": "a-contents",
                "b.txt": "b-contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let worktree_id = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Spawn multiple tasks to open paths, repeating some paths.
        let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
            (
                p.open_buffer((worktree_id, "a.txt"), cx),
                p.open_buffer((worktree_id, "b.txt"), cx),
                p.open_buffer((worktree_id, "a.txt"), cx),
            )
        });

        let buffer_a_1 = buffer_a_1.await.unwrap();
        let buffer_a_2 = buffer_a_2.await.unwrap();
        let buffer_b = buffer_b.await.unwrap();
        assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
        assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");

        // There is only one buffer per path.
        let buffer_a_id = buffer_a_1.id();
        assert_eq!(buffer_a_2.id(), buffer_a_id);

        // Open the same path again while it is still open.
        drop(buffer_a_1);
        let buffer_a_3 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
            .await
            .unwrap();

        // There's still only one buffer per path.
        assert_eq!(buffer_a_3.id(), buffer_a_id);
    }

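    // Exercises the buffer's dirty state and the events emitted on edits, saves, and
    // file deletion.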
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited { local: true },
                    language::Event::Dirtied
                ]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited { local: true },
                    language::Event::Dirtied,
                    language::Event::Edited { local: true },
                ],
            );
            events.borrow_mut().clear();
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited { local: true }]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        buffer3.update(cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }

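    // An unmodified buffer should reload when its file changes on disk; a modified buffer
    // should keep its edits and be marked as conflicted instead.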
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

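    // Diagnostics published with related information should be grouped, with one primary
    // entry per group.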
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

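    // Prepare-rename and perform-rename should round-trip through the fake language server
    // and apply the returned workspace edit across both buffers.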
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

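    // Project-wide search should cover files on disk as well as open buffers with
    // unsaved edits.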
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), cx);
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "four.rs"), cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            buffer.edit([20..28, 31..43], "two::TWO", cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}