1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToLspPosition,
22 ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
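/// A model that may be backed by an entry in one of the project's worktrees.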
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
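/// A set of worktrees together with everything needed to collaborate on them:
/// the open buffers, the language servers running for each worktree, diagnostic
/// bookkeeping, and the client state used to share the project with remote
/// peers (or to mirror a project hosted by someone else).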
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
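/// How the project retains an open buffer: strongly while the project is shared
/// or remote, weakly otherwise, or as a backlog of operations that arrived while
/// the buffer was still being loaded.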
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
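/// Whether this is the local, authoritative copy of the project or a replica
/// joined from another peer over RPC.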
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127 RemoteIdChanged(Option<u64>),
128 CollaboratorLeft(PeerId),
129}
130
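/// Events forwarded from a language server's notification callbacks to the
/// project over a channel (see `start_language_server`), where they are handled
/// by `on_lsp_event`.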
131enum LanguageServerEvent {
132 WorkStart {
133 token: String,
134 },
135 WorkProgress {
136 token: String,
137 progress: LanguageServerProgress,
138 },
139 WorkEnd {
140 token: String,
141 },
142 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
143}
144
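/// What the project knows about one running language server: its name, its
/// in-progress work keyed by LSP progress token, and how many disk-based
/// diagnostic updates are still pending.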
145pub struct LanguageServerStatus {
146 pub name: String,
147 pub pending_work: BTreeMap<String, LanguageServerProgress>,
148 pending_diagnostic_updates: isize,
149}
150
151#[derive(Clone, Debug)]
152pub struct LanguageServerProgress {
153 pub message: Option<String>,
154 pub percentage: Option<usize>,
155 pub last_update_at: Instant,
156}
157
158#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
159pub struct ProjectPath {
160 pub worktree_id: WorktreeId,
161 pub path: Arc<Path>,
162}
163
164#[derive(Clone, Debug, Default, PartialEq)]
165pub struct DiagnosticSummary {
166 pub error_count: usize,
167 pub warning_count: usize,
168 pub info_count: usize,
169 pub hint_count: usize,
170}
171
172#[derive(Debug)]
173pub struct Location {
174 pub buffer: ModelHandle<Buffer>,
175 pub range: Range<language::Anchor>,
176}
177
178#[derive(Debug)]
179pub struct DocumentHighlight {
180 pub range: Range<language::Anchor>,
181 pub kind: DocumentHighlightKind,
182}
183
184#[derive(Clone, Debug)]
185pub struct Symbol {
186 pub source_worktree_id: WorktreeId,
187 pub worktree_id: WorktreeId,
188 pub language_name: String,
189 pub path: PathBuf,
190 pub label: CodeLabel,
191 pub name: String,
192 pub kind: lsp::SymbolKind,
193 pub range: Range<PointUtf16>,
194 pub signature: [u8; 32],
195}
196
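/// The buffer transactions produced by a project-wide operation (such as
/// formatting several buffers at once), keyed by the buffer they apply to.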
197#[derive(Default)]
198pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
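    /// Tallies the primary diagnostics in `diagnostics` by severity.
    ///
    /// A minimal usage sketch (the `entries` slice is hypothetical):
    ///
    /// ```ignore
    /// let summary = DiagnosticSummary::new(entries.iter());
    /// println!("{} errors, {} warnings", summary.error_count, summary.warning_count);
    /// ```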
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
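/// A project-wide id for a worktree entry, allocated from a shared atomic
/// counter and convertible to and from the wire representation. A small sketch
/// (the standalone counter is for illustration only):
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let id = ProjectEntryId::new(&counter);
/// assert_eq!(ProjectEntryId::from_proto(id.to_proto()), id);
/// ```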
235#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
236pub struct ProjectEntryId(usize);
237
238impl ProjectEntryId {
239 pub fn new(counter: &AtomicUsize) -> Self {
240 Self(counter.fetch_add(1, SeqCst))
241 }
242
243 pub fn from_proto(id: u64) -> Self {
244 Self(id as usize)
245 }
246
247 pub fn to_proto(&self) -> u64 {
248 self.0 as u64
249 }
250
251 pub fn to_usize(&self) -> usize {
252 self.0
253 }
254}
255
256impl Project {
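    /// Registers the RPC message and request handlers through which remote peers
    /// drive this model: collaborator changes, worktree and buffer updates,
    /// LSP-backed requests, and so on.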
257 pub fn init(client: &Arc<Client>) {
258 client.add_model_message_handler(Self::handle_add_collaborator);
259 client.add_model_message_handler(Self::handle_buffer_reloaded);
260 client.add_model_message_handler(Self::handle_buffer_saved);
261 client.add_model_message_handler(Self::handle_start_language_server);
262 client.add_model_message_handler(Self::handle_update_language_server);
263 client.add_model_message_handler(Self::handle_remove_collaborator);
264 client.add_model_message_handler(Self::handle_register_worktree);
265 client.add_model_message_handler(Self::handle_unregister_worktree);
266 client.add_model_message_handler(Self::handle_unshare_project);
267 client.add_model_message_handler(Self::handle_update_buffer_file);
268 client.add_model_message_handler(Self::handle_update_buffer);
269 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
270 client.add_model_message_handler(Self::handle_update_worktree);
271 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
272 client.add_model_request_handler(Self::handle_apply_code_action);
273 client.add_model_request_handler(Self::handle_format_buffers);
274 client.add_model_request_handler(Self::handle_get_code_actions);
275 client.add_model_request_handler(Self::handle_get_completions);
276 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
277 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
278 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
279 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
280 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
281 client.add_model_request_handler(Self::handle_search_project);
282 client.add_model_request_handler(Self::handle_get_project_symbols);
283 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
284 client.add_model_request_handler(Self::handle_open_buffer_by_id);
285 client.add_model_request_handler(Self::handle_open_buffer_by_path);
286 client.add_model_request_handler(Self::handle_save_buffer);
287 }
288
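    /// Creates a local project backed by the given filesystem. A background task
    /// watches the client's connection status and (re)registers the project and
    /// its worktrees with the server whenever a connection is established, so
    /// the project can later be shared.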
289 pub fn local(
290 client: Arc<Client>,
291 user_store: ModelHandle<UserStore>,
292 languages: Arc<LanguageRegistry>,
293 fs: Arc<dyn Fs>,
294 cx: &mut MutableAppContext,
295 ) -> ModelHandle<Self> {
296 cx.add_model(|cx: &mut ModelContext<Self>| {
297 let (remote_id_tx, remote_id_rx) = watch::channel();
298 let _maintain_remote_id_task = cx.spawn_weak({
299 let rpc = client.clone();
300 move |this, mut cx| {
301 async move {
302 let mut status = rpc.status();
303 while let Some(status) = status.next().await {
304 if let Some(this) = this.upgrade(&cx) {
305 let remote_id = if status.is_connected() {
306 let response = rpc.request(proto::RegisterProject {}).await?;
307 Some(response.project_id)
308 } else {
309 None
310 };
311
312 if let Some(project_id) = remote_id {
313 let mut registrations = Vec::new();
314 this.update(&mut cx, |this, cx| {
315 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
316 registrations.push(worktree.update(
317 cx,
318 |worktree, cx| {
319 let worktree = worktree.as_local_mut().unwrap();
320 worktree.register(project_id, cx)
321 },
322 ));
323 }
324 });
325 for registration in registrations {
326 registration.await?;
327 }
328 }
329 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
330 }
331 }
332 Ok(())
333 }
334 .log_err()
335 }
336 });
337
338 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
339 Self {
340 worktrees: Default::default(),
341 collaborators: Default::default(),
342 opened_buffers: Default::default(),
343 shared_buffers: Default::default(),
344 loading_buffers: Default::default(),
345 loading_local_worktrees: Default::default(),
346 buffer_snapshots: Default::default(),
347 client_state: ProjectClientState::Local {
348 is_shared: false,
349 remote_id_tx,
350 remote_id_rx,
351 _maintain_remote_id_task,
352 },
353 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
354 subscriptions: Vec::new(),
355 active_entry: None,
356 languages,
357 client,
358 user_store,
359 fs,
360 next_entry_id: Default::default(),
361 language_servers_with_diagnostics_running: 0,
362 language_servers: Default::default(),
363 started_language_servers: Default::default(),
364 language_server_statuses: Default::default(),
365 language_server_settings: Default::default(),
366 next_language_server_id: 0,
367 nonce: StdRng::from_entropy().gen(),
368 }
369 })
370 }
371
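    /// Joins a project hosted by another peer. The returned project mirrors the
    /// host's worktrees, collaborators, and language server statuses, and
    /// becomes read-only if the host stops sharing (see `is_read_only`).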
372 pub async fn remote(
373 remote_id: u64,
374 client: Arc<Client>,
375 user_store: ModelHandle<UserStore>,
376 languages: Arc<LanguageRegistry>,
377 fs: Arc<dyn Fs>,
378 cx: &mut AsyncAppContext,
379 ) -> Result<ModelHandle<Self>> {
380 client.authenticate_and_connect(true, &cx).await?;
381
382 let response = client
383 .request(proto::JoinProject {
384 project_id: remote_id,
385 })
386 .await?;
387
388 let replica_id = response.replica_id as ReplicaId;
389
390 let mut worktrees = Vec::new();
391 for worktree in response.worktrees {
392 let (worktree, load_task) = cx
393 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
394 worktrees.push(worktree);
395 load_task.detach();
396 }
397
398 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
399 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
400 let mut this = Self {
401 worktrees: Vec::new(),
402 loading_buffers: Default::default(),
403 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
404 shared_buffers: Default::default(),
405 loading_local_worktrees: Default::default(),
406 active_entry: None,
407 collaborators: Default::default(),
408 languages,
409 user_store: user_store.clone(),
410 fs,
411 next_entry_id: Default::default(),
412 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
413 client: client.clone(),
414 client_state: ProjectClientState::Remote {
415 sharing_has_stopped: false,
416 remote_id,
417 replica_id,
418 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
419 async move {
420 let mut status = client.status();
421 let is_connected =
422 status.next().await.map_or(false, |s| s.is_connected());
423 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
424 if !is_connected || status.next().await.is_some() {
425 if let Some(this) = this.upgrade(&cx) {
426 this.update(&mut cx, |this, cx| this.project_unshared(cx))
427 }
428 }
429 Ok(())
430 }
431 .log_err()
432 }),
433 },
434 language_servers_with_diagnostics_running: 0,
435 language_servers: Default::default(),
436 started_language_servers: Default::default(),
437 language_server_settings: Default::default(),
438 language_server_statuses: response
439 .language_servers
440 .into_iter()
441 .map(|server| {
442 (
443 server.id as usize,
444 LanguageServerStatus {
445 name: server.name,
446 pending_work: Default::default(),
447 pending_diagnostic_updates: 0,
448 },
449 )
450 })
451 .collect(),
452 next_language_server_id: 0,
453 opened_buffers: Default::default(),
454 buffer_snapshots: Default::default(),
455 nonce: StdRng::from_entropy().gen(),
456 };
457 for worktree in worktrees {
458 this.add_worktree(&worktree, cx);
459 }
460 this
461 });
462
463 let user_ids = response
464 .collaborators
465 .iter()
466 .map(|peer| peer.user_id)
467 .collect();
468 user_store
469 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
470 .await?;
471 let mut collaborators = HashMap::default();
472 for message in response.collaborators {
473 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
474 collaborators.insert(collaborator.peer_id, collaborator);
475 }
476
477 this.update(cx, |this, _| {
478 this.collaborators = collaborators;
479 });
480
481 Ok(this)
482 }
483
484 #[cfg(any(test, feature = "test-support"))]
485 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
486 let languages = Arc::new(LanguageRegistry::test());
487 let http_client = client::test::FakeHttpClient::with_404_response();
488 let client = client::Client::new(http_client.clone());
489 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
490 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
491 }
492
493 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
494 self.opened_buffers
495 .get(&remote_id)
496 .and_then(|buffer| buffer.upgrade(cx))
497 }
498
499 #[cfg(any(test, feature = "test-support"))]
500 pub fn languages(&self) -> &Arc<LanguageRegistry> {
501 &self.languages
502 }
503
504 #[cfg(any(test, feature = "test-support"))]
505 pub fn check_invariants(&self, cx: &AppContext) {
506 if self.is_local() {
507 let mut worktree_root_paths = HashMap::default();
508 for worktree in self.worktrees(cx) {
509 let worktree = worktree.read(cx);
510 let abs_path = worktree.as_local().unwrap().abs_path().clone();
511 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
512 assert_eq!(
513 prev_worktree_id,
514 None,
515 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
516 abs_path,
517 worktree.id(),
518 prev_worktree_id
519 )
520 }
521 } else {
522 let replica_id = self.replica_id();
523 for buffer in self.opened_buffers.values() {
524 if let Some(buffer) = buffer.upgrade(cx) {
525 let buffer = buffer.read(cx);
526 assert_eq!(
527 buffer.deferred_ops_len(),
528 0,
529 "replica {}, buffer {} has deferred operations",
530 replica_id,
531 buffer.remote_id()
532 );
533 }
534 }
535 }
536 }
537
538 #[cfg(any(test, feature = "test-support"))]
539 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
540 let path = path.into();
541 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
542 self.opened_buffers.iter().any(|(_, buffer)| {
543 if let Some(buffer) = buffer.upgrade(cx) {
544 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
545 if file.worktree == worktree && file.path() == &path.path {
546 return true;
547 }
548 }
549 }
550 false
551 })
552 } else {
553 false
554 }
555 }
556
557 pub fn fs(&self) -> &Arc<dyn Fs> {
558 &self.fs
559 }
560
561 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
562 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
563 *remote_id_tx.borrow_mut() = remote_id;
564 }
565
566 self.subscriptions.clear();
567 if let Some(remote_id) = remote_id {
568 self.subscriptions
569 .push(self.client.add_model_for_remote_entity(remote_id, cx));
570 }
571 cx.emit(Event::RemoteIdChanged(remote_id))
572 }
573
574 pub fn remote_id(&self) -> Option<u64> {
575 match &self.client_state {
576 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
577 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
578 }
579 }
580
581 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
582 let mut id = None;
583 let mut watch = None;
584 match &self.client_state {
585 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
586 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
587 }
588
589 async move {
590 if let Some(id) = id {
591 return id;
592 }
593 let mut watch = watch.unwrap();
594 loop {
595 let id = *watch.borrow();
596 if let Some(id) = id {
597 return id;
598 }
599 watch.next().await;
600 }
601 }
602 }
603
604 pub fn replica_id(&self) -> ReplicaId {
605 match &self.client_state {
606 ProjectClientState::Local { .. } => 0,
607 ProjectClientState::Remote { replica_id, .. } => *replica_id,
608 }
609 }
610
611 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
612 &self.collaborators
613 }
614
615 pub fn worktrees<'a>(
616 &'a self,
617 cx: &'a AppContext,
618 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
619 self.worktrees
620 .iter()
621 .filter_map(move |worktree| worktree.upgrade(cx))
622 }
623
624 pub fn visible_worktrees<'a>(
625 &'a self,
626 cx: &'a AppContext,
627 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
628 self.worktrees.iter().filter_map(|worktree| {
629 worktree.upgrade(cx).and_then(|worktree| {
630 if worktree.read(cx).is_visible() {
631 Some(worktree)
632 } else {
633 None
634 }
635 })
636 })
637 }
638
639 pub fn worktree_for_id(
640 &self,
641 id: WorktreeId,
642 cx: &AppContext,
643 ) -> Option<ModelHandle<Worktree>> {
644 self.worktrees(cx)
645 .find(|worktree| worktree.read(cx).id() == id)
646 }
647
648 pub fn worktree_for_entry(
649 &self,
650 entry_id: ProjectEntryId,
651 cx: &AppContext,
652 ) -> Option<ModelHandle<Worktree>> {
653 self.worktrees(cx)
654 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
655 }
656
657 pub fn worktree_id_for_entry(
658 &self,
659 entry_id: ProjectEntryId,
660 cx: &AppContext,
661 ) -> Option<WorktreeId> {
662 self.worktree_for_entry(entry_id, cx)
663 .map(|worktree| worktree.read(cx).id())
664 }
665
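    /// Shares this local project with collaborators: buffer and worktree handles
    /// are upgraded to strong references so they remain alive for guests, the
    /// server is asked to share the project, and each worktree is then shared in
    /// turn.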
666 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
667 let rpc = self.client.clone();
668 cx.spawn(|this, mut cx| async move {
669 let project_id = this.update(&mut cx, |this, cx| {
670 if let ProjectClientState::Local {
671 is_shared,
672 remote_id_rx,
673 ..
674 } = &mut this.client_state
675 {
676 *is_shared = true;
677
678 for open_buffer in this.opened_buffers.values_mut() {
679 match open_buffer {
680 OpenBuffer::Strong(_) => {}
681 OpenBuffer::Weak(buffer) => {
682 if let Some(buffer) = buffer.upgrade(cx) {
683 *open_buffer = OpenBuffer::Strong(buffer);
684 }
685 }
686 OpenBuffer::Loading(_) => unreachable!(),
687 }
688 }
689
690 for worktree_handle in this.worktrees.iter_mut() {
691 match worktree_handle {
692 WorktreeHandle::Strong(_) => {}
693 WorktreeHandle::Weak(worktree) => {
694 if let Some(worktree) = worktree.upgrade(cx) {
695 *worktree_handle = WorktreeHandle::Strong(worktree);
696 }
697 }
698 }
699 }
700
701 remote_id_rx
702 .borrow()
703 .ok_or_else(|| anyhow!("no project id"))
704 } else {
705 Err(anyhow!("can't share a remote project"))
706 }
707 })?;
708
709 rpc.request(proto::ShareProject { project_id }).await?;
710
711 let mut tasks = Vec::new();
712 this.update(&mut cx, |this, cx| {
713 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
714 worktree.update(cx, |worktree, cx| {
715 let worktree = worktree.as_local_mut().unwrap();
716 tasks.push(worktree.share(project_id, cx));
717 });
718 }
719 });
720 for task in tasks {
721 task.await?;
722 }
723 this.update(&mut cx, |_, cx| cx.notify());
724 Ok(())
725 })
726 }
727
728 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
729 let rpc = self.client.clone();
730 cx.spawn(|this, mut cx| async move {
731 let project_id = this.update(&mut cx, |this, cx| {
732 if let ProjectClientState::Local {
733 is_shared,
734 remote_id_rx,
735 ..
736 } = &mut this.client_state
737 {
738 *is_shared = false;
739
740 for open_buffer in this.opened_buffers.values_mut() {
741 match open_buffer {
742 OpenBuffer::Strong(buffer) => {
743 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
744 }
745 _ => {}
746 }
747 }
748
749 for worktree_handle in this.worktrees.iter_mut() {
750 match worktree_handle {
751 WorktreeHandle::Strong(worktree) => {
752 if !worktree.read(cx).is_visible() {
753 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
754 }
755 }
756 _ => {}
757 }
758 }
759
760 remote_id_rx
761 .borrow()
762 .ok_or_else(|| anyhow!("no project id"))
763 } else {
764 Err(anyhow!("can't share a remote project"))
765 }
766 })?;
767
768 rpc.send(proto::UnshareProject { project_id })?;
769 this.update(&mut cx, |this, cx| {
770 this.collaborators.clear();
771 this.shared_buffers.clear();
772 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
773 worktree.update(cx, |worktree, _| {
774 worktree.as_local_mut().unwrap().unshare();
775 });
776 }
777 cx.notify()
778 });
779 Ok(())
780 })
781 }
782
783 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
784 if let ProjectClientState::Remote {
785 sharing_has_stopped,
786 ..
787 } = &mut self.client_state
788 {
789 *sharing_has_stopped = true;
790 self.collaborators.clear();
791 cx.notify();
792 }
793 }
794
795 pub fn is_read_only(&self) -> bool {
796 match &self.client_state {
797 ProjectClientState::Local { .. } => false,
798 ProjectClientState::Remote {
799 sharing_has_stopped,
800 ..
801 } => *sharing_has_stopped,
802 }
803 }
804
805 pub fn is_local(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => true,
808 ProjectClientState::Remote { .. } => false,
809 }
810 }
811
812 pub fn is_remote(&self) -> bool {
813 !self.is_local()
814 }
815
816 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
817 if self.is_remote() {
818 return Err(anyhow!("creating buffers as a guest is not supported yet"));
819 }
820
821 let buffer = cx.add_model(|cx| {
822 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
823 });
824 self.register_buffer(&buffer, cx)?;
825 Ok(buffer)
826 }
827
828 pub fn open_path(
829 &mut self,
830 path: impl Into<ProjectPath>,
831 cx: &mut ModelContext<Self>,
832 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
833 let task = self.open_buffer(path, cx);
834 cx.spawn_weak(|_, cx| async move {
835 let buffer = task.await?;
836 let project_entry_id = buffer
837 .read_with(&cx, |buffer, cx| {
838 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
839 })
840 .ok_or_else(|| anyhow!("no project entry"))?;
841 Ok((project_entry_id, buffer.into()))
842 })
843 }
844
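    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer or an in-flight load for the same path so that concurrent callers
    /// end up sharing a single buffer.
    ///
    /// A usage sketch (`project`, `worktree_id`, and `cx` are assumed to be in
    /// scope):
    ///
    /// ```ignore
    /// let path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project.update(cx, |project, cx| project.open_buffer(path, cx));
    /// ```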
845 pub fn open_buffer(
846 &mut self,
847 path: impl Into<ProjectPath>,
848 cx: &mut ModelContext<Self>,
849 ) -> Task<Result<ModelHandle<Buffer>>> {
850 let project_path = path.into();
851 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
852 worktree
853 } else {
854 return Task::ready(Err(anyhow!("no such worktree")));
855 };
856
857 // If there is already a buffer for the given path, then return it.
858 let existing_buffer = self.get_open_buffer(&project_path, cx);
859 if let Some(existing_buffer) = existing_buffer {
860 return Task::ready(Ok(existing_buffer));
861 }
862
863 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
864 // If the given path is already being loaded, then wait for that existing
865 // task to complete and return the same buffer.
866 hash_map::Entry::Occupied(e) => e.get().clone(),
867
868 // Otherwise, record the fact that this path is now being loaded.
869 hash_map::Entry::Vacant(entry) => {
870 let (mut tx, rx) = postage::watch::channel();
871 entry.insert(rx.clone());
872
873 let load_buffer = if worktree.read(cx).is_local() {
874 self.open_local_buffer(&project_path.path, &worktree, cx)
875 } else {
876 self.open_remote_buffer(&project_path.path, &worktree, cx)
877 };
878
879 cx.spawn(move |this, mut cx| async move {
880 let load_result = load_buffer.await;
881 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
882 // Record the fact that the buffer is no longer loading.
883 this.loading_buffers.remove(&project_path);
884 let buffer = load_result.map_err(Arc::new)?;
885 Ok(buffer)
886 }));
887 })
888 .detach();
889 rx
890 }
891 };
892
893 cx.foreground().spawn(async move {
894 loop {
895 if let Some(result) = loading_watch.borrow().as_ref() {
896 match result {
897 Ok(buffer) => return Ok(buffer.clone()),
898 Err(error) => return Err(anyhow!("{}", error)),
899 }
900 }
901 loading_watch.next().await;
902 }
903 })
904 }
905
906 fn open_local_buffer(
907 &mut self,
908 path: &Arc<Path>,
909 worktree: &ModelHandle<Worktree>,
910 cx: &mut ModelContext<Self>,
911 ) -> Task<Result<ModelHandle<Buffer>>> {
912 let load_buffer = worktree.update(cx, |worktree, cx| {
913 let worktree = worktree.as_local_mut().unwrap();
914 worktree.load_buffer(path, cx)
915 });
916 cx.spawn(|this, mut cx| async move {
917 let buffer = load_buffer.await?;
918 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
919 Ok(buffer)
920 })
921 }
922
923 fn open_remote_buffer(
924 &mut self,
925 path: &Arc<Path>,
926 worktree: &ModelHandle<Worktree>,
927 cx: &mut ModelContext<Self>,
928 ) -> Task<Result<ModelHandle<Buffer>>> {
929 let rpc = self.client.clone();
930 let project_id = self.remote_id().unwrap();
931 let remote_worktree_id = worktree.read(cx).id();
932 let path = path.clone();
933 let path_string = path.to_string_lossy().to_string();
934 cx.spawn(|this, mut cx| async move {
935 let response = rpc
936 .request(proto::OpenBufferByPath {
937 project_id,
938 worktree_id: remote_worktree_id.to_proto(),
939 path: path_string,
940 })
941 .await?;
942 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
943 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
944 .await
945 })
946 }
947
948 fn open_local_buffer_via_lsp(
949 &mut self,
950 abs_path: lsp::Url,
951 lang_name: Arc<str>,
952 lang_server: Arc<LanguageServer>,
953 cx: &mut ModelContext<Self>,
954 ) -> Task<Result<ModelHandle<Buffer>>> {
955 cx.spawn(|this, mut cx| async move {
956 let abs_path = abs_path
957 .to_file_path()
958 .map_err(|_| anyhow!("can't convert URI to path"))?;
959 let (worktree, relative_path) = if let Some(result) =
960 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
961 {
962 result
963 } else {
964 let worktree = this
965 .update(&mut cx, |this, cx| {
966 this.create_local_worktree(&abs_path, false, cx)
967 })
968 .await?;
969 this.update(&mut cx, |this, cx| {
970 this.language_servers
971 .insert((worktree.read(cx).id(), lang_name), lang_server);
972 });
973 (worktree, PathBuf::new())
974 };
975
976 let project_path = ProjectPath {
977 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
978 path: relative_path.into(),
979 };
980 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
981 .await
982 })
983 }
984
985 pub fn open_buffer_by_id(
986 &mut self,
987 id: u64,
988 cx: &mut ModelContext<Self>,
989 ) -> Task<Result<ModelHandle<Buffer>>> {
990 if let Some(buffer) = self.buffer_for_id(id, cx) {
991 Task::ready(Ok(buffer))
992 } else if self.is_local() {
993 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
994 } else if let Some(project_id) = self.remote_id() {
995 let request = self
996 .client
997 .request(proto::OpenBufferById { project_id, id });
998 cx.spawn(|this, mut cx| async move {
999 let buffer = request
1000 .await?
1001 .buffer
1002 .ok_or_else(|| anyhow!("invalid buffer"))?;
1003 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1004 .await
1005 })
1006 } else {
1007 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1008 }
1009 }
1010
1011 pub fn save_buffer_as(
1012 &mut self,
1013 buffer: ModelHandle<Buffer>,
1014 abs_path: PathBuf,
1015 cx: &mut ModelContext<Project>,
1016 ) -> Task<Result<()>> {
1017 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1018 cx.spawn(|this, mut cx| async move {
1019 let (worktree, path) = worktree_task.await?;
1020 worktree
1021 .update(&mut cx, |worktree, cx| {
1022 worktree
1023 .as_local_mut()
1024 .unwrap()
1025 .save_buffer_as(buffer.clone(), path, cx)
1026 })
1027 .await?;
1028 this.update(&mut cx, |this, cx| {
1029 this.assign_language_to_buffer(&buffer, cx);
1030 this.register_buffer_with_language_server(&buffer, cx);
1031 });
1032 Ok(())
1033 })
1034 }
1035
1036 pub fn get_open_buffer(
1037 &mut self,
1038 path: &ProjectPath,
1039 cx: &mut ModelContext<Self>,
1040 ) -> Option<ModelHandle<Buffer>> {
1041 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1042 self.opened_buffers.values().find_map(|buffer| {
1043 let buffer = buffer.upgrade(cx)?;
1044 let file = File::from_dyn(buffer.read(cx).file())?;
1045 if file.worktree == worktree && file.path() == &path.path {
1046 Some(buffer)
1047 } else {
1048 None
1049 }
1050 })
1051 }
1052
1053 fn register_buffer(
1054 &mut self,
1055 buffer: &ModelHandle<Buffer>,
1056 cx: &mut ModelContext<Self>,
1057 ) -> Result<()> {
1058 let remote_id = buffer.read(cx).remote_id();
1059 let open_buffer = if self.is_remote() || self.is_shared() {
1060 OpenBuffer::Strong(buffer.clone())
1061 } else {
1062 OpenBuffer::Weak(buffer.downgrade())
1063 };
1064
1065 match self.opened_buffers.insert(remote_id, open_buffer) {
1066 None => {}
1067 Some(OpenBuffer::Loading(operations)) => {
1068 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1069 }
1070 Some(OpenBuffer::Weak(existing_handle)) => {
1071 if existing_handle.upgrade(cx).is_some() {
1072 Err(anyhow!(
1073 "already registered buffer with remote id {}",
1074 remote_id
1075 ))?
1076 }
1077 }
1078 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1079 "already registered buffer with remote id {}",
1080 remote_id
1081 ))?,
1082 }
1083 cx.subscribe(buffer, |this, buffer, event, cx| {
1084 this.on_buffer_event(buffer, event, cx);
1085 })
1086 .detach();
1087
1088 self.assign_language_to_buffer(buffer, cx);
1089 self.register_buffer_with_language_server(buffer, cx);
1090
1091 Ok(())
1092 }
1093
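    /// Introduces a newly registered buffer to its language server: sends
    /// `textDocument/didOpen` with the initial snapshot, seeds any diagnostics
    /// already known for the path, records the server's completion triggers on
    /// the buffer, and arranges for `textDocument/didClose` when the buffer is
    /// released.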
1094 fn register_buffer_with_language_server(
1095 &mut self,
1096 buffer_handle: &ModelHandle<Buffer>,
1097 cx: &mut ModelContext<Self>,
1098 ) {
1099 let buffer = buffer_handle.read(cx);
1100 let buffer_id = buffer.remote_id();
1101 if let Some(file) = File::from_dyn(buffer.file()) {
1102 if file.is_local() {
1103 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1104 let initial_snapshot = buffer.text_snapshot();
1105 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1106
1107 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1108 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1109 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1110 .log_err();
1111 }
1112 }
1113
1114 if let Some(server) = language_server {
1115 server
1116 .notify::<lsp::notification::DidOpenTextDocument>(
1117 lsp::DidOpenTextDocumentParams {
1118 text_document: lsp::TextDocumentItem::new(
1119 uri,
1120 Default::default(),
1121 0,
1122 initial_snapshot.text(),
1123 ),
1124 }
1125 .clone(),
1126 )
1127 .log_err();
1128 buffer_handle.update(cx, |buffer, cx| {
1129 buffer.set_completion_triggers(
1130 server
1131 .capabilities()
1132 .completion_provider
1133 .as_ref()
1134 .and_then(|provider| provider.trigger_characters.clone())
1135 .unwrap_or(Vec::new()),
1136 cx,
1137 )
1138 });
1139 self.buffer_snapshots
1140 .insert(buffer_id, vec![(0, initial_snapshot)]);
1141 }
1142
1143 cx.observe_release(buffer_handle, |this, buffer, cx| {
1144 if let Some(file) = File::from_dyn(buffer.file()) {
1145 if file.is_local() {
1146 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1147 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1148 server
1149 .notify::<lsp::notification::DidCloseTextDocument>(
1150 lsp::DidCloseTextDocumentParams {
1151 text_document: lsp::TextDocumentIdentifier::new(
1152 uri.clone(),
1153 ),
1154 },
1155 )
1156 .log_err();
1157 }
1158 }
1159 }
1160 })
1161 .detach();
1162 }
1163 }
1164 }
1165
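    /// Reacts to buffer events: operations are relayed to collaborators, edits
    /// are converted into incremental `textDocument/didChange` notifications
    /// against the last snapshot sent to the server, and saves produce
    /// `textDocument/didSave` for every language server in the worktree.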
1166 fn on_buffer_event(
1167 &mut self,
1168 buffer: ModelHandle<Buffer>,
1169 event: &BufferEvent,
1170 cx: &mut ModelContext<Self>,
1171 ) -> Option<()> {
1172 match event {
1173 BufferEvent::Operation(operation) => {
1174 let project_id = self.remote_id()?;
1175 let request = self.client.request(proto::UpdateBuffer {
1176 project_id,
1177 buffer_id: buffer.read(cx).remote_id(),
1178 operations: vec![language::proto::serialize_operation(&operation)],
1179 });
1180 cx.background().spawn(request).detach_and_log_err(cx);
1181 }
1182 BufferEvent::Edited { .. } => {
1183 let language_server = self
1184 .language_server_for_buffer(buffer.read(cx), cx)?
1185 .clone();
1186 let buffer = buffer.read(cx);
1187 let file = File::from_dyn(buffer.file())?;
1188 let abs_path = file.as_local()?.abs_path(cx);
1189 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1190 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1191 let (version, prev_snapshot) = buffer_snapshots.last()?;
1192 let next_snapshot = buffer.text_snapshot();
1193 let next_version = version + 1;
1194
1195 let content_changes = buffer
1196 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1197 .map(|edit| {
1198 let edit_start = edit.new.start.0;
1199 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1200 let new_text = next_snapshot
1201 .text_for_range(edit.new.start.1..edit.new.end.1)
1202 .collect();
1203 lsp::TextDocumentContentChangeEvent {
1204 range: Some(lsp::Range::new(
1205 edit_start.to_lsp_position(),
1206 edit_end.to_lsp_position(),
1207 )),
1208 range_length: None,
1209 text: new_text,
1210 }
1211 })
1212 .collect();
1213
1214 buffer_snapshots.push((next_version, next_snapshot));
1215
1216 language_server
1217 .notify::<lsp::notification::DidChangeTextDocument>(
1218 lsp::DidChangeTextDocumentParams {
1219 text_document: lsp::VersionedTextDocumentIdentifier::new(
1220 uri,
1221 next_version,
1222 ),
1223 content_changes,
1224 },
1225 )
1226 .log_err();
1227 }
1228 BufferEvent::Saved => {
1229 let file = File::from_dyn(buffer.read(cx).file())?;
1230 let worktree_id = file.worktree_id(cx);
1231 let abs_path = file.as_local()?.abs_path(cx);
1232 let text_document = lsp::TextDocumentIdentifier {
1233 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1234 };
1235
1236 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1237 server
1238 .notify::<lsp::notification::DidSaveTextDocument>(
1239 lsp::DidSaveTextDocumentParams {
1240 text_document: text_document.clone(),
1241 text: None,
1242 },
1243 )
1244 .log_err();
1245 }
1246 }
1247 _ => {}
1248 }
1249
1250 None
1251 }
1252
1253 fn language_servers_for_worktree(
1254 &self,
1255 worktree_id: WorktreeId,
1256 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1257 self.language_servers.iter().filter_map(
1258 move |((language_server_worktree_id, language_name), server)| {
1259 if *language_server_worktree_id == worktree_id {
1260 Some((language_name.as_ref(), server))
1261 } else {
1262 None
1263 }
1264 },
1265 )
1266 }
1267
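    /// Picks a language for the buffer based on its full path and, for local
    /// worktrees, ensures the corresponding language server has been started.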
1268 fn assign_language_to_buffer(
1269 &mut self,
1270 buffer: &ModelHandle<Buffer>,
1271 cx: &mut ModelContext<Self>,
1272 ) -> Option<()> {
1273 // If the buffer has a language, set it and start the language server if we haven't already.
1274 let full_path = buffer.read(cx).file()?.full_path(cx);
1275 let language = self.languages.select_language(&full_path)?;
1276 buffer.update(cx, |buffer, cx| {
1277 buffer.set_language(Some(language.clone()), cx);
1278 });
1279
1280 let file = File::from_dyn(buffer.read(cx).file())?;
1281 let worktree = file.worktree.read(cx).as_local()?;
1282 let worktree_id = worktree.id();
1283 let worktree_abs_path = worktree.abs_path().clone();
1284 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1285
1286 None
1287 }
1288
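    /// Starts the language server for this worktree/language pair at most once,
    /// forwarding its progress and diagnostics notifications as
    /// `LanguageServerEvent`s and, once initialization completes, replaying every
    /// matching open buffer to it via `textDocument/didOpen`.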
1289 fn start_language_server(
1290 &mut self,
1291 worktree_id: WorktreeId,
1292 worktree_path: Arc<Path>,
1293 language: Arc<Language>,
1294 cx: &mut ModelContext<Self>,
1295 ) {
1296 let key = (worktree_id, language.name());
1297 self.started_language_servers
1298 .entry(key.clone())
1299 .or_insert_with(|| {
1300 let server_id = post_inc(&mut self.next_language_server_id);
1301 let language_server = self.languages.start_language_server(
1302 language.clone(),
1303 worktree_path,
1304 self.client.http_client(),
1305 cx,
1306 );
1307 cx.spawn_weak(|this, mut cx| async move {
1308 let mut language_server = language_server?.await.log_err()?;
1309 let this = this.upgrade(&cx)?;
1310 let (language_server_events_tx, language_server_events_rx) =
1311 smol::channel::unbounded();
1312
1313 language_server
1314 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1315 let language_server_events_tx = language_server_events_tx.clone();
1316 move |params| {
1317 language_server_events_tx
1318 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1319 .ok();
1320 }
1321 })
1322 .detach();
1323
1324 language_server
1325 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1326 let settings = this
1327 .read_with(&cx, |this, _| this.language_server_settings.clone());
1328 move |params| {
1329 let settings = settings.lock();
1330 Ok(params
1331 .items
1332 .into_iter()
1333 .map(|item| {
1334 if let Some(section) = &item.section {
1335 settings
1336 .get(section)
1337 .cloned()
1338 .unwrap_or(serde_json::Value::Null)
1339 } else {
1340 settings.clone()
1341 }
1342 })
1343 .collect())
1344 }
1345 })
1346 .detach();
1347
1348 language_server
1349 .on_notification::<lsp::notification::Progress, _>(move |params| {
1350 let token = match params.token {
1351 lsp::NumberOrString::String(token) => token,
1352 lsp::NumberOrString::Number(token) => {
1353 log::info!("skipping numeric progress token {}", token);
1354 return;
1355 }
1356 };
1357
1358 match params.value {
1359 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1360 lsp::WorkDoneProgress::Begin(_) => {
1361 language_server_events_tx
1362 .try_send(LanguageServerEvent::WorkStart { token })
1363 .ok();
1364 }
1365 lsp::WorkDoneProgress::Report(report) => {
1366 language_server_events_tx
1367 .try_send(LanguageServerEvent::WorkProgress {
1368 token,
1369 progress: LanguageServerProgress {
1370 message: report.message,
1371 percentage: report
1372 .percentage
1373 .map(|p| p as usize),
1374 last_update_at: Instant::now(),
1375 },
1376 })
1377 .ok();
1378 }
1379 lsp::WorkDoneProgress::End(_) => {
1380 language_server_events_tx
1381 .try_send(LanguageServerEvent::WorkEnd { token })
1382 .ok();
1383 }
1384 },
1385 }
1386 })
1387 .detach();
1388
1389 // Process all the LSP events.
1390 cx.spawn(|mut cx| {
1391 let this = this.downgrade();
1392 async move {
1393 while let Ok(event) = language_server_events_rx.recv().await {
1394 let this = this.upgrade(&cx)?;
1395 this.update(&mut cx, |this, cx| {
1396 this.on_lsp_event(server_id, event, &language, cx)
1397 });
1398
1399 // Don't starve the main thread when lots of events arrive all at once.
1400 smol::future::yield_now().await;
1401 }
1402 Some(())
1403 }
1404 })
1405 .detach();
1406
1407 let language_server = language_server.initialize().await.log_err()?;
1408 this.update(&mut cx, |this, cx| {
1409 this.language_servers
1410 .insert(key.clone(), language_server.clone());
1411 this.language_server_statuses.insert(
1412 server_id,
1413 LanguageServerStatus {
1414 name: language_server.name().to_string(),
1415 pending_work: Default::default(),
1416 pending_diagnostic_updates: 0,
1417 },
1418 );
1419 language_server
1420 .notify::<lsp::notification::DidChangeConfiguration>(
1421 lsp::DidChangeConfigurationParams {
1422 settings: this.language_server_settings.lock().clone(),
1423 },
1424 )
1425 .ok();
1426
1427 if let Some(project_id) = this.remote_id() {
1428 this.client
1429 .send(proto::StartLanguageServer {
1430 project_id,
1431 server: Some(proto::LanguageServer {
1432 id: server_id as u64,
1433 name: language_server.name().to_string(),
1434 }),
1435 })
1436 .log_err();
1437 }
1438
1439 // Tell the language server about every open buffer in the worktree that matches the language.
1440 for buffer in this.opened_buffers.values() {
1441 if let Some(buffer_handle) = buffer.upgrade(cx) {
1442 let buffer = buffer_handle.read(cx);
1443 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1444 file
1445 } else {
1446 continue;
1447 };
1448 let language = if let Some(language) = buffer.language() {
1449 language
1450 } else {
1451 continue;
1452 };
1453 if (file.worktree.read(cx).id(), language.name()) != key {
1454 continue;
1455 }
1456
1457 let file = file.as_local()?;
1458 let versions = this
1459 .buffer_snapshots
1460 .entry(buffer.remote_id())
1461 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1462 let (version, initial_snapshot) = versions.last().unwrap();
1463 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1464 language_server
1465 .notify::<lsp::notification::DidOpenTextDocument>(
1466 lsp::DidOpenTextDocumentParams {
1467 text_document: lsp::TextDocumentItem::new(
1468 uri,
1469 Default::default(),
1470 *version,
1471 initial_snapshot.text(),
1472 ),
1473 },
1474 )
1475 .log_err()?;
1476 buffer_handle.update(cx, |buffer, cx| {
1477 buffer.set_completion_triggers(
1478 language_server
1479 .capabilities()
1480 .completion_provider
1481 .as_ref()
1482 .and_then(|provider| {
1483 provider.trigger_characters.clone()
1484 })
1485 .unwrap_or(Vec::new()),
1486 cx,
1487 )
1488 });
1489 }
1490 }
1491
1492 cx.notify();
1493 Some(())
1494 });
1495
1496 Some(language_server)
1497 })
1498 });
1499 }
1500
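    /// Applies a single event from a language server. Progress tokens matching
    /// the language's disk-based-diagnostics token drive the started/finished
    /// lifecycle; other work progress and diagnostics updates are recorded
    /// locally and broadcast to collaborators.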
1501 fn on_lsp_event(
1502 &mut self,
1503 language_server_id: usize,
1504 event: LanguageServerEvent,
1505 language: &Arc<Language>,
1506 cx: &mut ModelContext<Self>,
1507 ) {
1508 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1509 let language_server_status =
1510 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1511 status
1512 } else {
1513 return;
1514 };
1515
1516 match event {
1517 LanguageServerEvent::WorkStart { token } => {
1518 if Some(&token) == disk_diagnostics_token {
1519 language_server_status.pending_diagnostic_updates += 1;
1520 if language_server_status.pending_diagnostic_updates == 1 {
1521 self.disk_based_diagnostics_started(cx);
1522 self.broadcast_language_server_update(
1523 language_server_id,
1524 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1525 proto::LspDiskBasedDiagnosticsUpdating {},
1526 ),
1527 );
1528 }
1529 } else {
1530 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1531 self.broadcast_language_server_update(
1532 language_server_id,
1533 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1534 token,
1535 }),
1536 );
1537 }
1538 }
1539 LanguageServerEvent::WorkProgress { token, progress } => {
1540 if Some(&token) != disk_diagnostics_token {
1541 self.on_lsp_work_progress(
1542 language_server_id,
1543 token.clone(),
1544 progress.clone(),
1545 cx,
1546 );
1547 self.broadcast_language_server_update(
1548 language_server_id,
1549 proto::update_language_server::Variant::WorkProgress(
1550 proto::LspWorkProgress {
1551 token,
1552 message: progress.message,
1553 percentage: progress.percentage.map(|p| p as u32),
1554 },
1555 ),
1556 );
1557 }
1558 }
1559 LanguageServerEvent::WorkEnd { token } => {
1560 if Some(&token) == disk_diagnostics_token {
1561 language_server_status.pending_diagnostic_updates -= 1;
1562 if language_server_status.pending_diagnostic_updates == 0 {
1563 self.disk_based_diagnostics_finished(cx);
1564 self.broadcast_language_server_update(
1565 language_server_id,
1566 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1567 proto::LspDiskBasedDiagnosticsUpdated {},
1568 ),
1569 );
1570 }
1571 } else {
1572 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1573 self.broadcast_language_server_update(
1574 language_server_id,
1575 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1576 token,
1577 }),
1578 );
1579 }
1580 }
1581 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1582 language.process_diagnostics(&mut params);
1583
1584 if disk_diagnostics_token.is_none() {
1585 self.disk_based_diagnostics_started(cx);
1586 self.broadcast_language_server_update(
1587 language_server_id,
1588 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1589 proto::LspDiskBasedDiagnosticsUpdating {},
1590 ),
1591 );
1592 }
1593 self.update_diagnostics(
1594 params,
1595 language
1596 .disk_based_diagnostic_sources()
1597 .unwrap_or(&Default::default()),
1598 cx,
1599 )
1600 .log_err();
1601 if disk_diagnostics_token.is_none() {
1602 self.disk_based_diagnostics_finished(cx);
1603 self.broadcast_language_server_update(
1604 language_server_id,
1605 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1606 proto::LspDiskBasedDiagnosticsUpdated {},
1607 ),
1608 );
1609 }
1610 }
1611 }
1612 }
1613
1614 fn on_lsp_work_start(
1615 &mut self,
1616 language_server_id: usize,
1617 token: String,
1618 cx: &mut ModelContext<Self>,
1619 ) {
1620 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1621 status.pending_work.insert(
1622 token,
1623 LanguageServerProgress {
1624 message: None,
1625 percentage: None,
1626 last_update_at: Instant::now(),
1627 },
1628 );
1629 cx.notify();
1630 }
1631 }
1632
1633 fn on_lsp_work_progress(
1634 &mut self,
1635 language_server_id: usize,
1636 token: String,
1637 progress: LanguageServerProgress,
1638 cx: &mut ModelContext<Self>,
1639 ) {
1640 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1641 status.pending_work.insert(token, progress);
1642 cx.notify();
1643 }
1644 }
1645
1646 fn on_lsp_work_end(
1647 &mut self,
1648 language_server_id: usize,
1649 token: String,
1650 cx: &mut ModelContext<Self>,
1651 ) {
1652 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1653 status.pending_work.remove(&token);
1654 cx.notify();
1655 }
1656 }
1657
1658 fn broadcast_language_server_update(
1659 &self,
1660 language_server_id: usize,
1661 event: proto::update_language_server::Variant,
1662 ) {
1663 if let Some(project_id) = self.remote_id() {
1664 self.client
1665 .send(proto::UpdateLanguageServer {
1666 project_id,
1667 language_server_id: language_server_id as u64,
1668 variant: Some(event),
1669 })
1670 .log_err();
1671 }
1672 }
1673
1674 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1675 for server in self.language_servers.values() {
1676 server
1677 .notify::<lsp::notification::DidChangeConfiguration>(
1678 lsp::DidChangeConfigurationParams {
1679 settings: settings.clone(),
1680 },
1681 )
1682 .ok();
1683 }
1684 *self.language_server_settings.lock() = settings;
1685 }
1686
1687 pub fn language_server_statuses(
1688 &self,
1689 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1690 self.language_server_statuses.values()
1691 }
1692
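    /// Ingests a `textDocument/publishDiagnostics` payload: related diagnostics
    /// are grouped, primary entries are distinguished from supporting
    /// information, and the result is applied to the affected buffer (if open)
    /// and to its worktree.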
1693 pub fn update_diagnostics(
1694 &mut self,
1695 params: lsp::PublishDiagnosticsParams,
1696 disk_based_sources: &HashSet<String>,
1697 cx: &mut ModelContext<Self>,
1698 ) -> Result<()> {
1699 let abs_path = params
1700 .uri
1701 .to_file_path()
1702 .map_err(|_| anyhow!("URI is not a file"))?;
1703 let mut next_group_id = 0;
1704 let mut diagnostics = Vec::default();
1705 let mut primary_diagnostic_group_ids = HashMap::default();
1706 let mut sources_by_group_id = HashMap::default();
1707 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1709 let source = diagnostic.source.as_ref();
1710 let code = diagnostic.code.as_ref().map(|code| match code {
1711 lsp::NumberOrString::Number(code) => code.to_string(),
1712 lsp::NumberOrString::String(code) => code.clone(),
1713 });
1714 let range = range_from_lsp(diagnostic.range);
1715 let is_supporting = diagnostic
1716 .related_information
1717 .as_ref()
1718 .map_or(false, |infos| {
1719 infos.iter().any(|info| {
1720 primary_diagnostic_group_ids.contains_key(&(
1721 source,
1722 code.clone(),
1723 range_from_lsp(info.location.range),
1724 ))
1725 })
1726 });
1727
1728 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1729 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1730 });
1731
1732 if is_supporting {
1733 supporting_diagnostics.insert(
1734 (source, code.clone(), range),
1735 (diagnostic.severity, is_unnecessary),
1736 );
1737 } else {
1738 let group_id = post_inc(&mut next_group_id);
1739 let is_disk_based =
1740 source.map_or(false, |source| disk_based_sources.contains(source));
1741
1742 sources_by_group_id.insert(group_id, source);
1743 primary_diagnostic_group_ids
1744 .insert((source, code.clone(), range.clone()), group_id);
1745
1746 diagnostics.push(DiagnosticEntry {
1747 range,
1748 diagnostic: Diagnostic {
1749 code: code.clone(),
1750 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1751 message: diagnostic.message.clone(),
1752 group_id,
1753 is_primary: true,
1754 is_valid: true,
1755 is_disk_based,
1756 is_unnecessary,
1757 },
1758 });
1759 if let Some(infos) = &diagnostic.related_information {
1760 for info in infos {
1761 if info.location.uri == params.uri && !info.message.is_empty() {
1762 let range = range_from_lsp(info.location.range);
1763 diagnostics.push(DiagnosticEntry {
1764 range,
1765 diagnostic: Diagnostic {
1766 code: code.clone(),
1767 severity: DiagnosticSeverity::INFORMATION,
1768 message: info.message.clone(),
1769 group_id,
1770 is_primary: false,
1771 is_valid: true,
1772 is_disk_based,
1773 is_unnecessary: false,
1774 },
1775 });
1776 }
1777 }
1778 }
1779 }
1780 }
1781
1782 for entry in &mut diagnostics {
1783 let diagnostic = &mut entry.diagnostic;
1784 if !diagnostic.is_primary {
1785 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1786 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1787 source,
1788 diagnostic.code.clone(),
1789 entry.range.clone(),
1790 )) {
1791 if let Some(severity) = severity {
1792 diagnostic.severity = severity;
1793 }
1794 diagnostic.is_unnecessary = is_unnecessary;
1795 }
1796 }
1797 }
1798
1799 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1800 Ok(())
1801 }
1802
1803 pub fn update_diagnostic_entries(
1804 &mut self,
1805 abs_path: PathBuf,
1806 version: Option<i32>,
1807 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1808 cx: &mut ModelContext<Project>,
1809 ) -> Result<(), anyhow::Error> {
1810 let (worktree, relative_path) = self
1811 .find_local_worktree(&abs_path, cx)
1812 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1813 if !worktree.read(cx).is_visible() {
1814 return Ok(());
1815 }
1816
1817 let project_path = ProjectPath {
1818 worktree_id: worktree.read(cx).id(),
1819 path: relative_path.into(),
1820 };
1821
1822 for buffer in self.opened_buffers.values() {
1823 if let Some(buffer) = buffer.upgrade(cx) {
1824 if buffer
1825 .read(cx)
1826 .file()
1827 .map_or(false, |file| *file.path() == project_path.path)
1828 {
1829 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1830 break;
1831 }
1832 }
1833 }
1834 worktree.update(cx, |worktree, cx| {
1835 worktree
1836 .as_local_mut()
1837 .ok_or_else(|| anyhow!("not a local worktree"))?
1838 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1839 })?;
1840 cx.emit(Event::DiagnosticsUpdated(project_path));
1841 Ok(())
1842 }
1843
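    /// Applies diagnostics to a buffer using the snapshot that corresponds to
    /// the language server's reported version, shifting disk-based diagnostics
    /// across unsaved edits, clipping ranges to valid positions, and expanding
    /// empty ranges so they remain visible.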
1844 fn update_buffer_diagnostics(
1845 &mut self,
1846 buffer: &ModelHandle<Buffer>,
1847 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1848 version: Option<i32>,
1849 cx: &mut ModelContext<Self>,
1850 ) -> Result<()> {
1851 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1852 Ordering::Equal
1853 .then_with(|| b.is_primary.cmp(&a.is_primary))
1854 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1855 .then_with(|| a.severity.cmp(&b.severity))
1856 .then_with(|| a.message.cmp(&b.message))
1857 }
1858
1859 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1860
1861 diagnostics.sort_unstable_by(|a, b| {
1862 Ordering::Equal
1863 .then_with(|| a.range.start.cmp(&b.range.start))
1864 .then_with(|| b.range.end.cmp(&a.range.end))
1865 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1866 });
1867
1868 let mut sanitized_diagnostics = Vec::new();
1869 let edits_since_save = Patch::new(
1870 snapshot
1871 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1872 .collect(),
1873 );
1874 for entry in diagnostics {
1875 let start;
1876 let end;
1877 if entry.diagnostic.is_disk_based {
1878 // Some diagnostics are based on files on disk instead of buffers'
1879 // current contents. Adjust these diagnostics' ranges to reflect
1880 // any unsaved edits.
1881 start = edits_since_save.old_to_new(entry.range.start);
1882 end = edits_since_save.old_to_new(entry.range.end);
1883 } else {
1884 start = entry.range.start;
1885 end = entry.range.end;
1886 }
1887
1888 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1889 ..snapshot.clip_point_utf16(end, Bias::Right);
1890
1891 // Expand empty ranges by one character
1892 if range.start == range.end {
1893 range.end.column += 1;
1894 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1895 if range.start == range.end && range.end.column > 0 {
1896 range.start.column -= 1;
1897 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1898 }
1899 }
1900
1901 sanitized_diagnostics.push(DiagnosticEntry {
1902 range,
1903 diagnostic: entry.diagnostic,
1904 });
1905 }
1906 drop(edits_since_save);
1907
1908 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1909 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1910 Ok(())
1911 }
1912
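    /// Formats the given buffers. Buffers belonging to a remote project are
    /// formatted by the host over RPC; local buffers use their language server's
    /// document (or range) formatting request. The resulting edits are grouped
    /// into a single `ProjectTransaction`.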
1913 pub fn format(
1914 &self,
1915 buffers: HashSet<ModelHandle<Buffer>>,
1916 push_to_history: bool,
1917 cx: &mut ModelContext<Project>,
1918 ) -> Task<Result<ProjectTransaction>> {
1919 let mut local_buffers = Vec::new();
1920 let mut remote_buffers = None;
1921 for buffer_handle in buffers {
1922 let buffer = buffer_handle.read(cx);
1923 if let Some(file) = File::from_dyn(buffer.file()) {
1924 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1925 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1926 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1927 }
1928 } else {
                    remote_buffers.get_or_insert_with(Vec::new).push(buffer_handle);
1930 }
1931 } else {
1932 return Task::ready(Ok(Default::default()));
1933 }
1934 }
1935
1936 let remote_buffers = self.remote_id().zip(remote_buffers);
1937 let client = self.client.clone();
1938
1939 cx.spawn(|this, mut cx| async move {
1940 let mut project_transaction = ProjectTransaction::default();
1941
1942 if let Some((project_id, remote_buffers)) = remote_buffers {
1943 let response = client
1944 .request(proto::FormatBuffers {
1945 project_id,
1946 buffer_ids: remote_buffers
1947 .iter()
1948 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1949 .collect(),
1950 })
1951 .await?
1952 .transaction
1953 .ok_or_else(|| anyhow!("missing transaction"))?;
1954 project_transaction = this
1955 .update(&mut cx, |this, cx| {
1956 this.deserialize_project_transaction(response, push_to_history, cx)
1957 })
1958 .await?;
1959 }
1960
1961 for (buffer, buffer_abs_path, language_server) in local_buffers {
1962 let text_document = lsp::TextDocumentIdentifier::new(
1963 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1964 );
1965 let capabilities = &language_server.capabilities();
1966 let lsp_edits = if capabilities
1967 .document_formatting_provider
1968 .as_ref()
1969 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1970 {
1971 language_server
1972 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1973 text_document,
1974 options: Default::default(),
1975 work_done_progress_params: Default::default(),
1976 })
1977 .await?
1978 } else if capabilities
1979 .document_range_formatting_provider
1980 .as_ref()
1981 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1982 {
1983 let buffer_start = lsp::Position::new(0, 0);
1984 let buffer_end = buffer
1985 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1986 .to_lsp_position();
1987 language_server
1988 .request::<lsp::request::RangeFormatting>(
1989 lsp::DocumentRangeFormattingParams {
1990 text_document,
1991 range: lsp::Range::new(buffer_start, buffer_end),
1992 options: Default::default(),
1993 work_done_progress_params: Default::default(),
1994 },
1995 )
1996 .await?
1997 } else {
1998 continue;
1999 };
2000
2001 if let Some(lsp_edits) = lsp_edits {
2002 let edits = this
2003 .update(&mut cx, |this, cx| {
2004 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2005 })
2006 .await?;
2007 buffer.update(&mut cx, |buffer, cx| {
2008 buffer.finalize_last_transaction();
2009 buffer.start_transaction();
2010 for (range, text) in edits {
2011 buffer.edit([range], text, cx);
2012 }
2013 if buffer.end_transaction(cx).is_some() {
2014 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2015 if !push_to_history {
2016 buffer.forget_transaction(transaction.id);
2017 }
2018 project_transaction.0.insert(cx.handle(), transaction);
2019 }
2020 });
2021 }
2022 }
2023
2024 Ok(project_transaction)
2025 })
2026 }
2027
2028 pub fn definition<T: ToPointUtf16>(
2029 &self,
2030 buffer: &ModelHandle<Buffer>,
2031 position: T,
2032 cx: &mut ModelContext<Self>,
2033 ) -> Task<Result<Vec<Location>>> {
2034 let position = position.to_point_utf16(buffer.read(cx));
2035 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2036 }
2037
2038 pub fn references<T: ToPointUtf16>(
2039 &self,
2040 buffer: &ModelHandle<Buffer>,
2041 position: T,
2042 cx: &mut ModelContext<Self>,
2043 ) -> Task<Result<Vec<Location>>> {
2044 let position = position.to_point_utf16(buffer.read(cx));
2045 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2046 }
2047
2048 pub fn document_highlights<T: ToPointUtf16>(
2049 &self,
2050 buffer: &ModelHandle<Buffer>,
2051 position: T,
2052 cx: &mut ModelContext<Self>,
2053 ) -> Task<Result<Vec<DocumentHighlight>>> {
2054 let position = position.to_point_utf16(buffer.read(cx));
2055
2056 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2057 }
2058
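    // Returns the workspace symbols matching `query`. Locally, the request is fanned out
    // to every running language server and the results are mapped back to project paths;
    // for remote projects, the query is forwarded to the host.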
2059 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2060 if self.is_local() {
2061 let mut language_servers = HashMap::default();
2062 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2063 if let Some((worktree, language)) = self
2064 .worktree_for_id(*worktree_id, cx)
2065 .and_then(|worktree| worktree.read(cx).as_local())
2066 .zip(self.languages.get_language(language_name))
2067 {
2068 language_servers
2069 .entry(Arc::as_ptr(language_server))
2070 .or_insert((
2071 language_server.clone(),
2072 *worktree_id,
2073 worktree.abs_path().clone(),
2074 language.clone(),
2075 ));
2076 }
2077 }
2078
2079 let mut requests = Vec::new();
2080 for (language_server, _, _, _) in language_servers.values() {
2081 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2082 lsp::WorkspaceSymbolParams {
2083 query: query.to_string(),
2084 ..Default::default()
2085 },
2086 ));
2087 }
2088
2089 cx.spawn_weak(|this, cx| async move {
2090 let responses = futures::future::try_join_all(requests).await?;
2091
2092 let mut symbols = Vec::new();
2093 if let Some(this) = this.upgrade(&cx) {
2094 this.read_with(&cx, |this, cx| {
2095 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2096 language_servers.into_values().zip(responses)
2097 {
2098 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2099 |lsp_symbol| {
2100 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2101 let mut worktree_id = source_worktree_id;
2102 let path;
2103 if let Some((worktree, rel_path)) =
2104 this.find_local_worktree(&abs_path, cx)
2105 {
2106 worktree_id = worktree.read(cx).id();
2107 path = rel_path;
2108 } else {
2109 path = relativize_path(&worktree_abs_path, &abs_path);
2110 }
2111
2112 let label = language
2113 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2114 .unwrap_or_else(|| {
2115 CodeLabel::plain(lsp_symbol.name.clone(), None)
2116 });
2117 let signature = this.symbol_signature(worktree_id, &path);
2118
2119 Some(Symbol {
2120 source_worktree_id,
2121 worktree_id,
2122 language_name: language.name().to_string(),
2123 name: lsp_symbol.name,
2124 kind: lsp_symbol.kind,
2125 label,
2126 path,
2127 range: range_from_lsp(lsp_symbol.location.range),
2128 signature,
2129 })
2130 },
2131 ));
2132 }
2133 })
2134 }
2135
2136 Ok(symbols)
2137 })
2138 } else if let Some(project_id) = self.remote_id() {
2139 let request = self.client.request(proto::GetProjectSymbols {
2140 project_id,
2141 query: query.to_string(),
2142 });
2143 cx.spawn_weak(|this, cx| async move {
2144 let response = request.await?;
2145 let mut symbols = Vec::new();
2146 if let Some(this) = this.upgrade(&cx) {
2147 this.read_with(&cx, |this, _| {
2148 symbols.extend(
2149 response
2150 .symbols
2151 .into_iter()
2152 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2153 );
2154 })
2155 }
2156 Ok(symbols)
2157 })
2158 } else {
2159 Task::ready(Ok(Default::default()))
2160 }
2161 }
2162
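    // Opens the buffer containing the given symbol, either via the language server that
    // produced it (for local projects) or by requesting the buffer from the host.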
2163 pub fn open_buffer_for_symbol(
2164 &mut self,
2165 symbol: &Symbol,
2166 cx: &mut ModelContext<Self>,
2167 ) -> Task<Result<ModelHandle<Buffer>>> {
2168 if self.is_local() {
2169 let language_server = if let Some(server) = self.language_servers.get(&(
2170 symbol.source_worktree_id,
2171 Arc::from(symbol.language_name.as_str()),
2172 )) {
2173 server.clone()
2174 } else {
2175 return Task::ready(Err(anyhow!(
2176 "language server for worktree and language not found"
2177 )));
2178 };
2179
2180 let worktree_abs_path = if let Some(worktree_abs_path) = self
2181 .worktree_for_id(symbol.worktree_id, cx)
2182 .and_then(|worktree| worktree.read(cx).as_local())
2183 .map(|local_worktree| local_worktree.abs_path())
2184 {
2185 worktree_abs_path
2186 } else {
2187 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2188 };
2189 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2190 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2191 uri
2192 } else {
2193 return Task::ready(Err(anyhow!("invalid symbol path")));
2194 };
2195
2196 self.open_local_buffer_via_lsp(
2197 symbol_uri,
2198 Arc::from(symbol.language_name.as_str()),
2199 language_server,
2200 cx,
2201 )
2202 } else if let Some(project_id) = self.remote_id() {
2203 let request = self.client.request(proto::OpenBufferForSymbol {
2204 project_id,
2205 symbol: Some(serialize_symbol(symbol)),
2206 });
2207 cx.spawn(|this, mut cx| async move {
2208 let response = request.await?;
2209 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2210 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2211 .await
2212 })
2213 } else {
2214 Task::ready(Err(anyhow!("project does not have a remote id")))
2215 }
2216 }
2217
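    // Requests completions at the given position. For local buffers, the language server
    // is queried directly and completions whose edit ranges don't fall on valid buffer
    // positions are discarded; for remote buffers, the host is asked over RPC and the
    // buffer waits to catch up to the host's reported version before the results are
    // deserialized.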
2218 pub fn completions<T: ToPointUtf16>(
2219 &self,
2220 source_buffer_handle: &ModelHandle<Buffer>,
2221 position: T,
2222 cx: &mut ModelContext<Self>,
2223 ) -> Task<Result<Vec<Completion>>> {
2224 let source_buffer_handle = source_buffer_handle.clone();
2225 let source_buffer = source_buffer_handle.read(cx);
2226 let buffer_id = source_buffer.remote_id();
2227 let language = source_buffer.language().cloned();
2228 let worktree;
2229 let buffer_abs_path;
2230 if let Some(file) = File::from_dyn(source_buffer.file()) {
2231 worktree = file.worktree.clone();
2232 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2233 } else {
2234 return Task::ready(Ok(Default::default()));
2235 };
2236
2237 let position = position.to_point_utf16(source_buffer);
2238 let anchor = source_buffer.anchor_after(position);
2239
2240 if worktree.read(cx).as_local().is_some() {
2241 let buffer_abs_path = buffer_abs_path.unwrap();
2242 let lang_server =
2243 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2244 server.clone()
2245 } else {
2246 return Task::ready(Ok(Default::default()));
2247 };
2248
2249 cx.spawn(|_, cx| async move {
2250 let completions = lang_server
2251 .request::<lsp::request::Completion>(lsp::CompletionParams {
2252 text_document_position: lsp::TextDocumentPositionParams::new(
2253 lsp::TextDocumentIdentifier::new(
2254 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2255 ),
2256 position.to_lsp_position(),
2257 ),
2258 context: Default::default(),
2259 work_done_progress_params: Default::default(),
2260 partial_result_params: Default::default(),
2261 })
2262 .await
2263 .context("lsp completion request failed")?;
2264
2265 let completions = if let Some(completions) = completions {
2266 match completions {
2267 lsp::CompletionResponse::Array(completions) => completions,
2268 lsp::CompletionResponse::List(list) => list.items,
2269 }
2270 } else {
2271 Default::default()
2272 };
2273
2274 source_buffer_handle.read_with(&cx, |this, _| {
2275 Ok(completions
2276 .into_iter()
2277 .filter_map(|lsp_completion| {
2278 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2279 lsp::CompletionTextEdit::Edit(edit) => {
2280 (range_from_lsp(edit.range), edit.new_text.clone())
2281 }
2282 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2283 log::info!("unsupported insert/replace completion");
2284 return None;
2285 }
2286 };
2287
2288 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2289 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2290 if clipped_start == old_range.start && clipped_end == old_range.end {
2291 Some(Completion {
2292 old_range: this.anchor_before(old_range.start)
2293 ..this.anchor_after(old_range.end),
2294 new_text,
2295 label: language
2296 .as_ref()
2297 .and_then(|l| l.label_for_completion(&lsp_completion))
2298 .unwrap_or_else(|| {
2299 CodeLabel::plain(
2300 lsp_completion.label.clone(),
2301 lsp_completion.filter_text.as_deref(),
2302 )
2303 }),
2304 lsp_completion,
2305 })
2306 } else {
2307 None
2308 }
2309 })
2310 .collect())
2311 })
2312 })
2313 } else if let Some(project_id) = self.remote_id() {
2314 let rpc = self.client.clone();
2315 let message = proto::GetCompletions {
2316 project_id,
2317 buffer_id,
2318 position: Some(language::proto::serialize_anchor(&anchor)),
2319 version: serialize_version(&source_buffer.version()),
2320 };
2321 cx.spawn_weak(|_, mut cx| async move {
2322 let response = rpc.request(message).await?;
2323
2324 source_buffer_handle
2325 .update(&mut cx, |buffer, _| {
2326 buffer.wait_for_version(deserialize_version(response.version))
2327 })
2328 .await;
2329
2330 response
2331 .completions
2332 .into_iter()
2333 .map(|completion| {
2334 language::proto::deserialize_completion(completion, language.as_ref())
2335 })
2336 .collect()
2337 })
2338 } else {
2339 Task::ready(Ok(Default::default()))
2340 }
2341 }
2342
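    // Resolves a completion with the language server and applies any additional text
    // edits it carries, returning the resulting transaction. For remote projects, the
    // host performs the resolution and the returned transaction is applied to the local
    // buffer once its edits arrive.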
2343 pub fn apply_additional_edits_for_completion(
2344 &self,
2345 buffer_handle: ModelHandle<Buffer>,
2346 completion: Completion,
2347 push_to_history: bool,
2348 cx: &mut ModelContext<Self>,
2349 ) -> Task<Result<Option<Transaction>>> {
2350 let buffer = buffer_handle.read(cx);
2351 let buffer_id = buffer.remote_id();
2352
2353 if self.is_local() {
2354 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2355 server.clone()
2356 } else {
2357 return Task::ready(Ok(Default::default()));
2358 };
2359
2360 cx.spawn(|this, mut cx| async move {
2361 let resolved_completion = lang_server
2362 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2363 .await?;
2364 if let Some(edits) = resolved_completion.additional_text_edits {
2365 let edits = this
2366 .update(&mut cx, |this, cx| {
2367 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2368 })
2369 .await?;
2370 buffer_handle.update(&mut cx, |buffer, cx| {
2371 buffer.finalize_last_transaction();
2372 buffer.start_transaction();
2373 for (range, text) in edits {
2374 buffer.edit([range], text, cx);
2375 }
2376 let transaction = if buffer.end_transaction(cx).is_some() {
2377 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2378 if !push_to_history {
2379 buffer.forget_transaction(transaction.id);
2380 }
2381 Some(transaction)
2382 } else {
2383 None
2384 };
2385 Ok(transaction)
2386 })
2387 } else {
2388 Ok(None)
2389 }
2390 })
2391 } else if let Some(project_id) = self.remote_id() {
2392 let client = self.client.clone();
2393 cx.spawn(|_, mut cx| async move {
2394 let response = client
2395 .request(proto::ApplyCompletionAdditionalEdits {
2396 project_id,
2397 buffer_id,
2398 completion: Some(language::proto::serialize_completion(&completion)),
2399 })
2400 .await?;
2401
2402 if let Some(transaction) = response.transaction {
2403 let transaction = language::proto::deserialize_transaction(transaction)?;
2404 buffer_handle
2405 .update(&mut cx, |buffer, _| {
2406 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2407 })
2408 .await;
2409 if push_to_history {
2410 buffer_handle.update(&mut cx, |buffer, _| {
2411 buffer.push_transaction(transaction.clone(), Instant::now());
2412 });
2413 }
2414 Ok(Some(transaction))
2415 } else {
2416 Ok(None)
2417 }
2418 })
2419 } else {
2420 Task::ready(Err(anyhow!("project does not have a remote id")))
2421 }
2422 }
2423
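    // Returns the code actions available for the given range, restricted to quickfix and
    // refactor kinds, fetched either from the buffer's language server or from the host.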
2424 pub fn code_actions<T: ToOffset>(
2425 &self,
2426 buffer_handle: &ModelHandle<Buffer>,
2427 range: Range<T>,
2428 cx: &mut ModelContext<Self>,
2429 ) -> Task<Result<Vec<CodeAction>>> {
2430 let buffer_handle = buffer_handle.clone();
2431 let buffer = buffer_handle.read(cx);
2432 let buffer_id = buffer.remote_id();
2433 let worktree;
2434 let buffer_abs_path;
2435 if let Some(file) = File::from_dyn(buffer.file()) {
2436 worktree = file.worktree.clone();
2437 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2438 } else {
2439 return Task::ready(Ok(Default::default()));
2440 };
2441 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2442
2443 if worktree.read(cx).as_local().is_some() {
2444 let buffer_abs_path = buffer_abs_path.unwrap();
2445 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2446 server.clone()
2447 } else {
2448 return Task::ready(Ok(Default::default()));
2449 };
2450
2451 let lsp_range = lsp::Range::new(
2452 range.start.to_point_utf16(buffer).to_lsp_position(),
2453 range.end.to_point_utf16(buffer).to_lsp_position(),
2454 );
2455 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2457 return Ok(Default::default());
2458 }
2459
2460 Ok(lang_server
2461 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2462 text_document: lsp::TextDocumentIdentifier::new(
2463 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2464 ),
2465 range: lsp_range,
2466 work_done_progress_params: Default::default(),
2467 partial_result_params: Default::default(),
2468 context: lsp::CodeActionContext {
2469 diagnostics: Default::default(),
2470 only: Some(vec![
2471 lsp::CodeActionKind::QUICKFIX,
2472 lsp::CodeActionKind::REFACTOR,
2473 lsp::CodeActionKind::REFACTOR_EXTRACT,
2474 ]),
2475 },
2476 })
2477 .await?
2478 .unwrap_or_default()
2479 .into_iter()
2480 .filter_map(|entry| {
2481 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2482 Some(CodeAction {
2483 range: range.clone(),
2484 lsp_action,
2485 })
2486 } else {
2487 None
2488 }
2489 })
2490 .collect())
2491 })
2492 } else if let Some(project_id) = self.remote_id() {
2493 let rpc = self.client.clone();
2494 let version = buffer.version();
2495 cx.spawn_weak(|_, mut cx| async move {
2496 let response = rpc
2497 .request(proto::GetCodeActions {
2498 project_id,
2499 buffer_id,
2500 start: Some(language::proto::serialize_anchor(&range.start)),
2501 end: Some(language::proto::serialize_anchor(&range.end)),
2502 version: serialize_version(&version),
2503 })
2504 .await?;
2505
2506 buffer_handle
2507 .update(&mut cx, |buffer, _| {
2508 buffer.wait_for_version(deserialize_version(response.version))
2509 })
2510 .await;
2511
2512 response
2513 .actions
2514 .into_iter()
2515 .map(language::proto::deserialize_code_action)
2516 .collect()
2517 })
2518 } else {
2519 Task::ready(Ok(Default::default()))
2520 }
2521 }
2522
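    // Applies a code action. The action is first resolved with the language server (or
    // re-fetched by title if it carries no resolve data), and its workspace edit is then
    // applied across all affected buffers, producing a `ProjectTransaction`.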
2523 pub fn apply_code_action(
2524 &self,
2525 buffer_handle: ModelHandle<Buffer>,
2526 mut action: CodeAction,
2527 push_to_history: bool,
2528 cx: &mut ModelContext<Self>,
2529 ) -> Task<Result<ProjectTransaction>> {
2530 if self.is_local() {
2531 let buffer = buffer_handle.read(cx);
2532 let lang_name = if let Some(lang) = buffer.language() {
2533 lang.name()
2534 } else {
2535 return Task::ready(Ok(Default::default()));
2536 };
2537 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2538 server.clone()
2539 } else {
2540 return Task::ready(Ok(Default::default()));
2541 };
2542 let range = action.range.to_point_utf16(buffer);
2543
2544 cx.spawn(|this, mut cx| async move {
2545 if let Some(lsp_range) = action
2546 .lsp_action
2547 .data
2548 .as_mut()
2549 .and_then(|d| d.get_mut("codeActionParams"))
2550 .and_then(|d| d.get_mut("range"))
2551 {
2552 *lsp_range = serde_json::to_value(&lsp::Range::new(
2553 range.start.to_lsp_position(),
2554 range.end.to_lsp_position(),
2555 ))
2556 .unwrap();
2557 action.lsp_action = lang_server
2558 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2559 .await?;
2560 } else {
2561 let actions = this
2562 .update(&mut cx, |this, cx| {
2563 this.code_actions(&buffer_handle, action.range, cx)
2564 })
2565 .await?;
2566 action.lsp_action = actions
2567 .into_iter()
2568 .find(|a| a.lsp_action.title == action.lsp_action.title)
2569 .ok_or_else(|| anyhow!("code action is outdated"))?
2570 .lsp_action;
2571 }
2572
2573 if let Some(edit) = action.lsp_action.edit {
2574 Self::deserialize_workspace_edit(
2575 this,
2576 edit,
2577 push_to_history,
2578 lang_name,
2579 lang_server,
2580 &mut cx,
2581 )
2582 .await
2583 } else {
2584 Ok(ProjectTransaction::default())
2585 }
2586 })
2587 } else if let Some(project_id) = self.remote_id() {
2588 let client = self.client.clone();
2589 let request = proto::ApplyCodeAction {
2590 project_id,
2591 buffer_id: buffer_handle.read(cx).remote_id(),
2592 action: Some(language::proto::serialize_code_action(&action)),
2593 };
2594 cx.spawn(|this, mut cx| async move {
2595 let response = client
2596 .request(request)
2597 .await?
2598 .transaction
2599 .ok_or_else(|| anyhow!("missing transaction"))?;
2600 this.update(&mut cx, |this, cx| {
2601 this.deserialize_project_transaction(response, push_to_history, cx)
2602 })
2603 .await
2604 })
2605 } else {
2606 Task::ready(Err(anyhow!("project does not have a remote id")))
2607 }
2608 }
2609
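    // Applies an LSP workspace edit to the project: resource operations (create, rename,
    // delete) are performed directly on the file system, while text document edits are
    // applied to the corresponding buffers, each within its own transaction.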
2610 async fn deserialize_workspace_edit(
2611 this: ModelHandle<Self>,
2612 edit: lsp::WorkspaceEdit,
2613 push_to_history: bool,
2614 language_name: Arc<str>,
2615 language_server: Arc<LanguageServer>,
2616 cx: &mut AsyncAppContext,
2617 ) -> Result<ProjectTransaction> {
2618 let fs = this.read_with(cx, |this, _| this.fs.clone());
2619 let mut operations = Vec::new();
2620 if let Some(document_changes) = edit.document_changes {
2621 match document_changes {
2622 lsp::DocumentChanges::Edits(edits) => {
2623 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2624 }
2625 lsp::DocumentChanges::Operations(ops) => operations = ops,
2626 }
2627 } else if let Some(changes) = edit.changes {
2628 operations.extend(changes.into_iter().map(|(uri, edits)| {
2629 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2630 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2631 uri,
2632 version: None,
2633 },
2634 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2635 })
2636 }));
2637 }
2638
2639 let mut project_transaction = ProjectTransaction::default();
2640 for operation in operations {
2641 match operation {
2642 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2643 let abs_path = op
2644 .uri
2645 .to_file_path()
2646 .map_err(|_| anyhow!("can't convert URI to path"))?;
2647
2648 if let Some(parent_path) = abs_path.parent() {
2649 fs.create_dir(parent_path).await?;
2650 }
                    // A trailing slash in the URI denotes a directory. `Path::ends_with`
                    // compares whole path components, so check the URI string instead.
                    if op.uri.as_str().ends_with('/') {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
2657 }
2658 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2659 let source_abs_path = op
2660 .old_uri
2661 .to_file_path()
2662 .map_err(|_| anyhow!("can't convert URI to path"))?;
2663 let target_abs_path = op
2664 .new_uri
2665 .to_file_path()
2666 .map_err(|_| anyhow!("can't convert URI to path"))?;
2667 fs.rename(
2668 &source_abs_path,
2669 &target_abs_path,
2670 op.options.map(Into::into).unwrap_or_default(),
2671 )
2672 .await?;
2673 }
2674 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2675 let abs_path = op
2676 .uri
2677 .to_file_path()
2678 .map_err(|_| anyhow!("can't convert URI to path"))?;
2679 let options = op.options.map(Into::into).unwrap_or_default();
                    // As above, a trailing slash in the URI denotes a directory.
                    if op.uri.as_str().ends_with('/') {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
2685 }
2686 lsp::DocumentChangeOperation::Edit(op) => {
2687 let buffer_to_edit = this
2688 .update(cx, |this, cx| {
2689 this.open_local_buffer_via_lsp(
2690 op.text_document.uri,
2691 language_name.clone(),
2692 language_server.clone(),
2693 cx,
2694 )
2695 })
2696 .await?;
2697
2698 let edits = this
2699 .update(cx, |this, cx| {
2700 let edits = op.edits.into_iter().map(|edit| match edit {
2701 lsp::OneOf::Left(edit) => edit,
2702 lsp::OneOf::Right(edit) => edit.text_edit,
2703 });
2704 this.edits_from_lsp(
2705 &buffer_to_edit,
2706 edits,
2707 op.text_document.version,
2708 cx,
2709 )
2710 })
2711 .await?;
2712
2713 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2714 buffer.finalize_last_transaction();
2715 buffer.start_transaction();
2716 for (range, text) in edits {
2717 buffer.edit([range], text, cx);
2718 }
2719 let transaction = if buffer.end_transaction(cx).is_some() {
2720 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2721 if !push_to_history {
2722 buffer.forget_transaction(transaction.id);
2723 }
2724 Some(transaction)
2725 } else {
2726 None
2727 };
2728
2729 transaction
2730 });
2731 if let Some(transaction) = transaction {
2732 project_transaction.0.insert(buffer_to_edit, transaction);
2733 }
2734 }
2735 }
2736 }
2737
2738 Ok(project_transaction)
2739 }
2740
2741 pub fn prepare_rename<T: ToPointUtf16>(
2742 &self,
2743 buffer: ModelHandle<Buffer>,
2744 position: T,
2745 cx: &mut ModelContext<Self>,
2746 ) -> Task<Result<Option<Range<Anchor>>>> {
2747 let position = position.to_point_utf16(buffer.read(cx));
2748 self.request_lsp(buffer, PrepareRename { position }, cx)
2749 }
2750
2751 pub fn perform_rename<T: ToPointUtf16>(
2752 &self,
2753 buffer: ModelHandle<Buffer>,
2754 position: T,
2755 new_name: String,
2756 push_to_history: bool,
2757 cx: &mut ModelContext<Self>,
2758 ) -> Task<Result<ProjectTransaction>> {
2759 let position = position.to_point_utf16(buffer.read(cx));
2760 self.request_lsp(
2761 buffer,
2762 PerformRename {
2763 position,
2764 new_name,
2765 push_to_history,
2766 },
2767 cx,
2768 )
2769 }
2770
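    // Searches the project for the given query. Locally this is a three-stage pipeline:
    // background workers scan the visible files of each worktree for candidate paths,
    // candidates are opened as buffers (alongside buffers that are already open), and a
    // second pool of workers searches the buffer contents, yielding anchor ranges per
    // buffer. Remote projects forward the query to the host.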
2771 pub fn search(
2772 &self,
2773 query: SearchQuery,
2774 cx: &mut ModelContext<Self>,
2775 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2776 if self.is_local() {
2777 let snapshots = self
2778 .visible_worktrees(cx)
2779 .filter_map(|tree| {
2780 let tree = tree.read(cx).as_local()?;
2781 Some(tree.snapshot())
2782 })
2783 .collect::<Vec<_>>();
2784
2785 let background = cx.background().clone();
2786 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2787 if path_count == 0 {
2788 return Task::ready(Ok(Default::default()));
2789 }
2790 let workers = background.num_cpus().min(path_count);
2791 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2792 cx.background()
2793 .spawn({
2794 let fs = self.fs.clone();
2795 let background = cx.background().clone();
2796 let query = query.clone();
2797 async move {
2798 let fs = &fs;
2799 let query = &query;
2800 let matching_paths_tx = &matching_paths_tx;
2801 let paths_per_worker = (path_count + workers - 1) / workers;
2802 let snapshots = &snapshots;
2803 background
2804 .scoped(|scope| {
2805 for worker_ix in 0..workers {
2806 let worker_start_ix = worker_ix * paths_per_worker;
2807 let worker_end_ix = worker_start_ix + paths_per_worker;
2808 scope.spawn(async move {
2809 let mut snapshot_start_ix = 0;
2810 let mut abs_path = PathBuf::new();
2811 for snapshot in snapshots {
2812 let snapshot_end_ix =
2813 snapshot_start_ix + snapshot.visible_file_count();
2814 if worker_end_ix <= snapshot_start_ix {
2815 break;
2816 } else if worker_start_ix > snapshot_end_ix {
2817 snapshot_start_ix = snapshot_end_ix;
2818 continue;
2819 } else {
2820 let start_in_snapshot = worker_start_ix
2821 .saturating_sub(snapshot_start_ix);
2822 let end_in_snapshot =
2823 cmp::min(worker_end_ix, snapshot_end_ix)
2824 - snapshot_start_ix;
2825
2826 for entry in snapshot
2827 .files(false, start_in_snapshot)
2828 .take(end_in_snapshot - start_in_snapshot)
2829 {
2830 if matching_paths_tx.is_closed() {
2831 break;
2832 }
2833
2834 abs_path.clear();
2835 abs_path.push(&snapshot.abs_path());
2836 abs_path.push(&entry.path);
2837 let matches = if let Some(file) =
2838 fs.open_sync(&abs_path).await.log_err()
2839 {
2840 query.detect(file).unwrap_or(false)
2841 } else {
2842 false
2843 };
2844
2845 if matches {
2846 let project_path =
2847 (snapshot.id(), entry.path.clone());
2848 if matching_paths_tx
2849 .send(project_path)
2850 .await
2851 .is_err()
2852 {
2853 break;
2854 }
2855 }
2856 }
2857
2858 snapshot_start_ix = snapshot_end_ix;
2859 }
2860 }
2861 });
2862 }
2863 })
2864 .await;
2865 }
2866 })
2867 .detach();
2868
2869 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2870 let open_buffers = self
2871 .opened_buffers
2872 .values()
2873 .filter_map(|b| b.upgrade(cx))
2874 .collect::<HashSet<_>>();
2875 cx.spawn(|this, cx| async move {
2876 for buffer in &open_buffers {
2877 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2878 buffers_tx.send((buffer.clone(), snapshot)).await?;
2879 }
2880
2881 let open_buffers = Rc::new(RefCell::new(open_buffers));
2882 while let Some(project_path) = matching_paths_rx.next().await {
2883 if buffers_tx.is_closed() {
2884 break;
2885 }
2886
2887 let this = this.clone();
2888 let open_buffers = open_buffers.clone();
2889 let buffers_tx = buffers_tx.clone();
2890 cx.spawn(|mut cx| async move {
2891 if let Some(buffer) = this
2892 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2893 .await
2894 .log_err()
2895 {
2896 if open_buffers.borrow_mut().insert(buffer.clone()) {
2897 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2898 buffers_tx.send((buffer, snapshot)).await?;
2899 }
2900 }
2901
2902 Ok::<_, anyhow::Error>(())
2903 })
2904 .detach();
2905 }
2906
2907 Ok::<_, anyhow::Error>(())
2908 })
2909 .detach_and_log_err(cx);
2910
2911 let background = cx.background().clone();
2912 cx.background().spawn(async move {
2913 let query = &query;
2914 let mut matched_buffers = Vec::new();
2915 for _ in 0..workers {
2916 matched_buffers.push(HashMap::default());
2917 }
2918 background
2919 .scoped(|scope| {
2920 for worker_matched_buffers in matched_buffers.iter_mut() {
2921 let mut buffers_rx = buffers_rx.clone();
2922 scope.spawn(async move {
2923 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2924 let buffer_matches = query
2925 .search(snapshot.as_rope())
2926 .await
2927 .iter()
2928 .map(|range| {
2929 snapshot.anchor_before(range.start)
2930 ..snapshot.anchor_after(range.end)
2931 })
2932 .collect::<Vec<_>>();
2933 if !buffer_matches.is_empty() {
2934 worker_matched_buffers
2935 .insert(buffer.clone(), buffer_matches);
2936 }
2937 }
2938 });
2939 }
2940 })
2941 .await;
2942 Ok(matched_buffers.into_iter().flatten().collect())
2943 })
2944 } else if let Some(project_id) = self.remote_id() {
2945 let request = self.client.request(query.to_proto(project_id));
2946 cx.spawn(|this, mut cx| async move {
2947 let response = request.await?;
2948 let mut result = HashMap::default();
2949 for location in response.locations {
2950 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2951 let target_buffer = this
2952 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2953 .await?;
2954 let start = location
2955 .start
2956 .and_then(deserialize_anchor)
2957 .ok_or_else(|| anyhow!("missing target start"))?;
2958 let end = location
2959 .end
2960 .and_then(deserialize_anchor)
2961 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
2966 }
2967 Ok(result)
2968 })
2969 } else {
2970 Task::ready(Ok(Default::default()))
2971 }
2972 }
2973
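    // Issues a typed LSP request for the given buffer: routed to the buffer's language
    // server when the project is local, or to the host over RPC when it is remote.
    // Returns a default response if the request isn't supported or can't be routed.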
2974 fn request_lsp<R: LspCommand>(
2975 &self,
2976 buffer_handle: ModelHandle<Buffer>,
2977 request: R,
2978 cx: &mut ModelContext<Self>,
2979 ) -> Task<Result<R::Response>>
2980 where
2981 <R::LspRequest as lsp::request::Request>::Result: Send,
2982 {
2983 let buffer = buffer_handle.read(cx);
2984 if self.is_local() {
2985 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2986 if let Some((file, language_server)) =
2987 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2988 {
2989 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2990 return cx.spawn(|this, cx| async move {
2991 if !request.check_capabilities(&language_server.capabilities()) {
2992 return Ok(Default::default());
2993 }
2994
2995 let response = language_server
2996 .request::<R::LspRequest>(lsp_params)
2997 .await
2998 .context("lsp request failed")?;
2999 request
3000 .response_from_lsp(response, this, buffer_handle, cx)
3001 .await
3002 });
3003 }
3004 } else if let Some(project_id) = self.remote_id() {
3005 let rpc = self.client.clone();
3006 let message = request.to_proto(project_id, buffer);
3007 return cx.spawn(|this, cx| async move {
3008 let response = rpc.request(message).await?;
3009 request
3010 .response_from_proto(response, this, buffer_handle, cx)
3011 .await
3012 });
3013 }
3014 Task::ready(Ok(Default::default()))
3015 }
3016
3017 pub fn find_or_create_local_worktree(
3018 &mut self,
3019 abs_path: impl AsRef<Path>,
3020 visible: bool,
3021 cx: &mut ModelContext<Self>,
3022 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3023 let abs_path = abs_path.as_ref();
3024 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3025 Task::ready(Ok((tree.clone(), relative_path.into())))
3026 } else {
3027 let worktree = self.create_local_worktree(abs_path, visible, cx);
3028 cx.foreground()
3029 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3030 }
3031 }
3032
3033 pub fn find_local_worktree(
3034 &self,
3035 abs_path: &Path,
3036 cx: &AppContext,
3037 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3038 for tree in self.worktrees(cx) {
3039 if let Some(relative_path) = tree
3040 .read(cx)
3041 .as_local()
3042 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3043 {
3044 return Some((tree.clone(), relative_path.into()));
3045 }
3046 }
3047 None
3048 }
3049
3050 pub fn is_shared(&self) -> bool {
3051 match &self.client_state {
3052 ProjectClientState::Local { is_shared, .. } => *is_shared,
3053 ProjectClientState::Remote { .. } => false,
3054 }
3055 }
3056
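    // Creates a local worktree for the given path, memoizing the in-flight load in
    // `loading_local_worktrees` so that concurrent callers share a single task. Once
    // loaded, the worktree is registered with (or shared to) the remote project, if any.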
3057 fn create_local_worktree(
3058 &mut self,
3059 abs_path: impl AsRef<Path>,
3060 visible: bool,
3061 cx: &mut ModelContext<Self>,
3062 ) -> Task<Result<ModelHandle<Worktree>>> {
3063 let fs = self.fs.clone();
3064 let client = self.client.clone();
3065 let next_entry_id = self.next_entry_id.clone();
3066 let path: Arc<Path> = abs_path.as_ref().into();
3067 let task = self
3068 .loading_local_worktrees
3069 .entry(path.clone())
3070 .or_insert_with(|| {
3071 cx.spawn(|project, mut cx| {
3072 async move {
3073 let worktree = Worktree::local(
3074 client.clone(),
3075 path.clone(),
3076 visible,
3077 fs,
3078 next_entry_id,
3079 &mut cx,
3080 )
3081 .await;
3082 project.update(&mut cx, |project, _| {
3083 project.loading_local_worktrees.remove(&path);
3084 });
3085 let worktree = worktree?;
3086
3087 let (remote_project_id, is_shared) =
3088 project.update(&mut cx, |project, cx| {
3089 project.add_worktree(&worktree, cx);
3090 (project.remote_id(), project.is_shared())
3091 });
3092
3093 if let Some(project_id) = remote_project_id {
3094 if is_shared {
3095 worktree
3096 .update(&mut cx, |worktree, cx| {
3097 worktree.as_local_mut().unwrap().share(project_id, cx)
3098 })
3099 .await?;
3100 } else {
3101 worktree
3102 .update(&mut cx, |worktree, cx| {
3103 worktree.as_local_mut().unwrap().register(project_id, cx)
3104 })
3105 .await?;
3106 }
3107 }
3108
3109 Ok(worktree)
3110 }
                    .map_err(Arc::new)
3112 })
3113 .shared()
3114 })
3115 .clone();
3116 cx.foreground().spawn(async move {
3117 match task.await {
3118 Ok(worktree) => Ok(worktree),
3119 Err(err) => Err(anyhow!("{}", err)),
3120 }
3121 })
3122 }
3123
3124 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3125 self.worktrees.retain(|worktree| {
3126 worktree
3127 .upgrade(cx)
3128 .map_or(false, |w| w.read(cx).id() != id)
3129 });
3130 cx.notify();
3131 }
3132
3133 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3134 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3135 if worktree.read(cx).is_local() {
3136 cx.subscribe(&worktree, |this, worktree, _, cx| {
3137 this.update_local_worktree_buffers(worktree, cx);
3138 })
3139 .detach();
3140 }
3141
3142 let push_strong_handle = {
3143 let worktree = worktree.read(cx);
3144 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3145 };
3146 if push_strong_handle {
3147 self.worktrees
3148 .push(WorktreeHandle::Strong(worktree.clone()));
3149 } else {
3150 cx.observe_release(&worktree, |this, _, cx| {
3151 this.worktrees
3152 .retain(|worktree| worktree.upgrade(cx).is_some());
3153 cx.notify();
3154 })
3155 .detach();
3156 self.worktrees
3157 .push(WorktreeHandle::Weak(worktree.downgrade()));
3158 }
3159 cx.notify();
3160 }
3161
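    // Reconciles open buffers with a local worktree's latest snapshot: each buffer's file
    // is re-resolved by entry id or path (keeping only the old path when the entry no
    // longer exists), collaborators are notified of the change, and buffers whose handles
    // have been dropped are forgotten.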
3162 fn update_local_worktree_buffers(
3163 &mut self,
3164 worktree_handle: ModelHandle<Worktree>,
3165 cx: &mut ModelContext<Self>,
3166 ) {
3167 let snapshot = worktree_handle.read(cx).snapshot();
3168 let mut buffers_to_delete = Vec::new();
3169 for (buffer_id, buffer) in &self.opened_buffers {
3170 if let Some(buffer) = buffer.upgrade(cx) {
3171 buffer.update(cx, |buffer, cx| {
3172 if let Some(old_file) = File::from_dyn(buffer.file()) {
3173 if old_file.worktree != worktree_handle {
3174 return;
3175 }
3176
3177 let new_file = if let Some(entry) = old_file
3178 .entry_id
3179 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3180 {
3181 File {
3182 is_local: true,
3183 entry_id: Some(entry.id),
3184 mtime: entry.mtime,
3185 path: entry.path.clone(),
3186 worktree: worktree_handle.clone(),
3187 }
3188 } else if let Some(entry) =
3189 snapshot.entry_for_path(old_file.path().as_ref())
3190 {
3191 File {
3192 is_local: true,
3193 entry_id: Some(entry.id),
3194 mtime: entry.mtime,
3195 path: entry.path.clone(),
3196 worktree: worktree_handle.clone(),
3197 }
3198 } else {
3199 File {
3200 is_local: true,
3201 entry_id: None,
3202 path: old_file.path().clone(),
3203 mtime: old_file.mtime(),
3204 worktree: worktree_handle.clone(),
3205 }
3206 };
3207
3208 if let Some(project_id) = self.remote_id() {
3209 self.client
3210 .send(proto::UpdateBufferFile {
3211 project_id,
3212 buffer_id: *buffer_id as u64,
3213 file: Some(new_file.to_proto()),
3214 })
3215 .log_err();
3216 }
3217 buffer.file_updated(Box::new(new_file), cx).detach();
3218 }
3219 });
3220 } else {
3221 buffers_to_delete.push(*buffer_id);
3222 }
3223 }
3224
3225 for buffer_id in buffers_to_delete {
3226 self.opened_buffers.remove(&buffer_id);
3227 }
3228 }
3229
3230 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3231 let new_active_entry = entry.and_then(|project_path| {
3232 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3233 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3234 Some(entry.id)
3235 });
3236 if new_active_entry != self.active_entry {
3237 self.active_entry = new_active_entry;
3238 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3239 }
3240 }
3241
3242 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3243 self.language_servers_with_diagnostics_running > 0
3244 }
3245
3246 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3247 let mut summary = DiagnosticSummary::default();
3248 for (_, path_summary) in self.diagnostic_summaries(cx) {
3249 summary.error_count += path_summary.error_count;
3250 summary.warning_count += path_summary.warning_count;
3251 summary.info_count += path_summary.info_count;
3252 summary.hint_count += path_summary.hint_count;
3253 }
3254 summary
3255 }
3256
3257 pub fn diagnostic_summaries<'a>(
3258 &'a self,
3259 cx: &'a AppContext,
3260 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3261 self.worktrees(cx).flat_map(move |worktree| {
3262 let worktree = worktree.read(cx);
3263 let worktree_id = worktree.id();
3264 worktree
3265 .diagnostic_summaries()
3266 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3267 })
3268 }
3269
3270 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3271 self.language_servers_with_diagnostics_running += 1;
3272 if self.language_servers_with_diagnostics_running == 1 {
3273 cx.emit(Event::DiskBasedDiagnosticsStarted);
3274 }
3275 }
3276
3277 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3278 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3279 self.language_servers_with_diagnostics_running -= 1;
3280 if self.language_servers_with_diagnostics_running == 0 {
3281 cx.emit(Event::DiskBasedDiagnosticsFinished);
3282 }
3283 }
3284
3285 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3286 self.active_entry
3287 }
3288
3289 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3290 self.worktree_for_id(path.worktree_id, cx)?
3291 .read(cx)
3292 .entry_for_path(&path.path)
3293 .map(|entry| entry.id)
3294 }
3295
3296 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3297 let worktree = self.worktree_for_entry(entry_id, cx)?;
3298 let worktree = worktree.read(cx);
3299 let worktree_id = worktree.id();
3300 let path = worktree.entry_for_id(entry_id)?.path.clone();
3301 Some(ProjectPath { worktree_id, path })
3302 }
3303
3304 // RPC message handlers
3305
3306 async fn handle_unshare_project(
3307 this: ModelHandle<Self>,
3308 _: TypedEnvelope<proto::UnshareProject>,
3309 _: Arc<Client>,
3310 mut cx: AsyncAppContext,
3311 ) -> Result<()> {
3312 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3313 Ok(())
3314 }
3315
3316 async fn handle_add_collaborator(
3317 this: ModelHandle<Self>,
3318 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3319 _: Arc<Client>,
3320 mut cx: AsyncAppContext,
3321 ) -> Result<()> {
3322 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3323 let collaborator = envelope
3324 .payload
3325 .collaborator
3326 .take()
3327 .ok_or_else(|| anyhow!("empty collaborator"))?;
3328
3329 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3330 this.update(&mut cx, |this, cx| {
3331 this.collaborators
3332 .insert(collaborator.peer_id, collaborator);
3333 cx.notify();
3334 });
3335
3336 Ok(())
3337 }
3338
3339 async fn handle_remove_collaborator(
3340 this: ModelHandle<Self>,
3341 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3342 _: Arc<Client>,
3343 mut cx: AsyncAppContext,
3344 ) -> Result<()> {
3345 this.update(&mut cx, |this, cx| {
3346 let peer_id = PeerId(envelope.payload.peer_id);
3347 let replica_id = this
3348 .collaborators
3349 .remove(&peer_id)
3350 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3351 .replica_id;
3352 for (_, buffer) in &this.opened_buffers {
3353 if let Some(buffer) = buffer.upgrade(cx) {
3354 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3355 }
3356 }
3357 cx.emit(Event::CollaboratorLeft(peer_id));
3358 cx.notify();
3359 Ok(())
3360 })
3361 }
3362
3363 async fn handle_register_worktree(
3364 this: ModelHandle<Self>,
3365 envelope: TypedEnvelope<proto::RegisterWorktree>,
3366 client: Arc<Client>,
3367 mut cx: AsyncAppContext,
3368 ) -> Result<()> {
3369 this.update(&mut cx, |this, cx| {
3370 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3371 let replica_id = this.replica_id();
3372 let worktree = proto::Worktree {
3373 id: envelope.payload.worktree_id,
3374 root_name: envelope.payload.root_name,
3375 entries: Default::default(),
3376 diagnostic_summaries: Default::default(),
3377 visible: envelope.payload.visible,
3378 };
3379 let (worktree, load_task) =
3380 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3381 this.add_worktree(&worktree, cx);
3382 load_task.detach();
3383 Ok(())
3384 })
3385 }
3386
3387 async fn handle_unregister_worktree(
3388 this: ModelHandle<Self>,
3389 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3390 _: Arc<Client>,
3391 mut cx: AsyncAppContext,
3392 ) -> Result<()> {
3393 this.update(&mut cx, |this, cx| {
3394 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3395 this.remove_worktree(worktree_id, cx);
3396 Ok(())
3397 })
3398 }
3399
3400 async fn handle_update_worktree(
3401 this: ModelHandle<Self>,
3402 envelope: TypedEnvelope<proto::UpdateWorktree>,
3403 _: Arc<Client>,
3404 mut cx: AsyncAppContext,
3405 ) -> Result<()> {
3406 this.update(&mut cx, |this, cx| {
3407 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3408 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3409 worktree.update(cx, |worktree, _| {
3410 let worktree = worktree.as_remote_mut().unwrap();
3411 worktree.update_from_remote(envelope)
3412 })?;
3413 }
3414 Ok(())
3415 })
3416 }
3417
3418 async fn handle_update_diagnostic_summary(
3419 this: ModelHandle<Self>,
3420 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3421 _: Arc<Client>,
3422 mut cx: AsyncAppContext,
3423 ) -> Result<()> {
3424 this.update(&mut cx, |this, cx| {
3425 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3426 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3427 if let Some(summary) = envelope.payload.summary {
3428 let project_path = ProjectPath {
3429 worktree_id,
3430 path: Path::new(&summary.path).into(),
3431 };
3432 worktree.update(cx, |worktree, _| {
3433 worktree
3434 .as_remote_mut()
3435 .unwrap()
3436 .update_diagnostic_summary(project_path.path.clone(), &summary);
3437 });
3438 cx.emit(Event::DiagnosticsUpdated(project_path));
3439 }
3440 }
3441 Ok(())
3442 })
3443 }
3444
3445 async fn handle_start_language_server(
3446 this: ModelHandle<Self>,
3447 envelope: TypedEnvelope<proto::StartLanguageServer>,
3448 _: Arc<Client>,
3449 mut cx: AsyncAppContext,
3450 ) -> Result<()> {
3451 let server = envelope
3452 .payload
3453 .server
3454 .ok_or_else(|| anyhow!("invalid server"))?;
3455 this.update(&mut cx, |this, cx| {
3456 this.language_server_statuses.insert(
3457 server.id as usize,
3458 LanguageServerStatus {
3459 name: server.name,
3460 pending_work: Default::default(),
3461 pending_diagnostic_updates: 0,
3462 },
3463 );
3464 cx.notify();
3465 });
3466 Ok(())
3467 }
3468
3469 async fn handle_update_language_server(
3470 this: ModelHandle<Self>,
3471 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3472 _: Arc<Client>,
3473 mut cx: AsyncAppContext,
3474 ) -> Result<()> {
3475 let language_server_id = envelope.payload.language_server_id as usize;
3476 match envelope
3477 .payload
3478 .variant
3479 .ok_or_else(|| anyhow!("invalid variant"))?
3480 {
3481 proto::update_language_server::Variant::WorkStart(payload) => {
3482 this.update(&mut cx, |this, cx| {
3483 this.on_lsp_work_start(language_server_id, payload.token, cx);
3484 })
3485 }
3486 proto::update_language_server::Variant::WorkProgress(payload) => {
3487 this.update(&mut cx, |this, cx| {
3488 this.on_lsp_work_progress(
3489 language_server_id,
3490 payload.token,
3491 LanguageServerProgress {
3492 message: payload.message,
3493 percentage: payload.percentage.map(|p| p as usize),
3494 last_update_at: Instant::now(),
3495 },
3496 cx,
3497 );
3498 })
3499 }
3500 proto::update_language_server::Variant::WorkEnd(payload) => {
3501 this.update(&mut cx, |this, cx| {
3502 this.on_lsp_work_end(language_server_id, payload.token, cx);
3503 })
3504 }
3505 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3506 this.update(&mut cx, |this, cx| {
3507 this.disk_based_diagnostics_started(cx);
3508 })
3509 }
3510 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3511 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3512 }
3513 }
3514
3515 Ok(())
3516 }
3517
3518 async fn handle_update_buffer(
3519 this: ModelHandle<Self>,
3520 envelope: TypedEnvelope<proto::UpdateBuffer>,
3521 _: Arc<Client>,
3522 mut cx: AsyncAppContext,
3523 ) -> Result<()> {
3524 this.update(&mut cx, |this, cx| {
3525 let payload = envelope.payload.clone();
3526 let buffer_id = payload.buffer_id;
3527 let ops = payload
3528 .operations
3529 .into_iter()
                .map(language::proto::deserialize_operation)
3531 .collect::<Result<Vec<_>, _>>()?;
3532 match this.opened_buffers.entry(buffer_id) {
3533 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3534 OpenBuffer::Strong(buffer) => {
3535 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3536 }
3537 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3538 OpenBuffer::Weak(_) => {}
3539 },
3540 hash_map::Entry::Vacant(e) => {
3541 e.insert(OpenBuffer::Loading(ops));
3542 }
3543 }
3544 Ok(())
3545 })
3546 }
3547
3548 async fn handle_update_buffer_file(
3549 this: ModelHandle<Self>,
3550 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3551 _: Arc<Client>,
3552 mut cx: AsyncAppContext,
3553 ) -> Result<()> {
3554 this.update(&mut cx, |this, cx| {
3555 let payload = envelope.payload.clone();
3556 let buffer_id = payload.buffer_id;
3557 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3558 let worktree = this
3559 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3560 .ok_or_else(|| anyhow!("no such worktree"))?;
3561 let file = File::from_proto(file, worktree.clone(), cx)?;
3562 let buffer = this
3563 .opened_buffers
3564 .get_mut(&buffer_id)
3565 .and_then(|b| b.upgrade(cx))
3566 .ok_or_else(|| anyhow!("no such buffer"))?;
3567 buffer.update(cx, |buffer, cx| {
3568 buffer.file_updated(Box::new(file), cx).detach();
3569 });
3570 Ok(())
3571 })
3572 }
3573
3574 async fn handle_save_buffer(
3575 this: ModelHandle<Self>,
3576 envelope: TypedEnvelope<proto::SaveBuffer>,
3577 _: Arc<Client>,
3578 mut cx: AsyncAppContext,
3579 ) -> Result<proto::BufferSaved> {
3580 let buffer_id = envelope.payload.buffer_id;
3581 let requested_version = deserialize_version(envelope.payload.version);
3582
3583 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3584 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3585 let buffer = this
3586 .opened_buffers
3587 .get(&buffer_id)
3588 .map(|buffer| buffer.upgrade(cx).unwrap())
3589 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3590 Ok::<_, anyhow::Error>((project_id, buffer))
3591 })?;
3592 buffer
3593 .update(&mut cx, |buffer, _| {
3594 buffer.wait_for_version(requested_version)
3595 })
3596 .await;
3597
3598 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3599 Ok(proto::BufferSaved {
3600 project_id,
3601 buffer_id,
3602 version: serialize_version(&saved_version),
3603 mtime: Some(mtime.into()),
3604 })
3605 }
3606
3607 async fn handle_format_buffers(
3608 this: ModelHandle<Self>,
3609 envelope: TypedEnvelope<proto::FormatBuffers>,
3610 _: Arc<Client>,
3611 mut cx: AsyncAppContext,
3612 ) -> Result<proto::FormatBuffersResponse> {
3613 let sender_id = envelope.original_sender_id()?;
3614 let format = this.update(&mut cx, |this, cx| {
3615 let mut buffers = HashSet::default();
3616 for buffer_id in &envelope.payload.buffer_ids {
3617 buffers.insert(
3618 this.opened_buffers
3619 .get(buffer_id)
3620 .map(|buffer| buffer.upgrade(cx).unwrap())
3621 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3622 );
3623 }
3624 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3625 })?;
3626
3627 let project_transaction = format.await?;
3628 let project_transaction = this.update(&mut cx, |this, cx| {
3629 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3630 });
3631 Ok(proto::FormatBuffersResponse {
3632 transaction: Some(project_transaction),
3633 })
3634 }
3635
3636 async fn handle_get_completions(
3637 this: ModelHandle<Self>,
3638 envelope: TypedEnvelope<proto::GetCompletions>,
3639 _: Arc<Client>,
3640 mut cx: AsyncAppContext,
3641 ) -> Result<proto::GetCompletionsResponse> {
3642 let position = envelope
3643 .payload
3644 .position
3645 .and_then(language::proto::deserialize_anchor)
3646 .ok_or_else(|| anyhow!("invalid position"))?;
3647 let version = deserialize_version(envelope.payload.version);
3648 let buffer = this.read_with(&cx, |this, cx| {
3649 this.opened_buffers
3650 .get(&envelope.payload.buffer_id)
3651 .map(|buffer| buffer.upgrade(cx).unwrap())
3652 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3653 })?;
3654 buffer
3655 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3656 .await;
3657 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3658 let completions = this
3659 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3660 .await?;
3661
3662 Ok(proto::GetCompletionsResponse {
3663 completions: completions
3664 .iter()
3665 .map(language::proto::serialize_completion)
3666 .collect(),
3667 version: serialize_version(&version),
3668 })
3669 }
3670
3671 async fn handle_apply_additional_edits_for_completion(
3672 this: ModelHandle<Self>,
3673 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3674 _: Arc<Client>,
3675 mut cx: AsyncAppContext,
3676 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3677 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3678 let buffer = this
3679 .opened_buffers
3680 .get(&envelope.payload.buffer_id)
3681 .map(|buffer| buffer.upgrade(cx).unwrap())
3682 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3683 let language = buffer.read(cx).language();
3684 let completion = language::proto::deserialize_completion(
3685 envelope
3686 .payload
3687 .completion
3688 .ok_or_else(|| anyhow!("invalid completion"))?,
3689 language,
3690 )?;
3691 Ok::<_, anyhow::Error>(
3692 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3693 )
3694 })?;
3695
3696 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3697 transaction: apply_additional_edits
3698 .await?
3699 .as_ref()
3700 .map(language::proto::serialize_transaction),
3701 })
3702 }
3703
3704 async fn handle_get_code_actions(
3705 this: ModelHandle<Self>,
3706 envelope: TypedEnvelope<proto::GetCodeActions>,
3707 _: Arc<Client>,
3708 mut cx: AsyncAppContext,
3709 ) -> Result<proto::GetCodeActionsResponse> {
3710 let start = envelope
3711 .payload
3712 .start
3713 .and_then(language::proto::deserialize_anchor)
3714 .ok_or_else(|| anyhow!("invalid start"))?;
3715 let end = envelope
3716 .payload
3717 .end
3718 .and_then(language::proto::deserialize_anchor)
3719 .ok_or_else(|| anyhow!("invalid end"))?;
3720 let buffer = this.update(&mut cx, |this, cx| {
3721 this.opened_buffers
3722 .get(&envelope.payload.buffer_id)
3723 .map(|buffer| buffer.upgrade(cx).unwrap())
3724 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3725 })?;
3726 buffer
3727 .update(&mut cx, |buffer, _| {
3728 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3729 })
3730 .await;
3731
3732 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3733 let code_actions = this.update(&mut cx, |this, cx| {
3734 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3735 })?;
3736
3737 Ok(proto::GetCodeActionsResponse {
3738 actions: code_actions
3739 .await?
3740 .iter()
3741 .map(language::proto::serialize_code_action)
3742 .collect(),
3743 version: serialize_version(&version),
3744 })
3745 }
3746
3747 async fn handle_apply_code_action(
3748 this: ModelHandle<Self>,
3749 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3750 _: Arc<Client>,
3751 mut cx: AsyncAppContext,
3752 ) -> Result<proto::ApplyCodeActionResponse> {
3753 let sender_id = envelope.original_sender_id()?;
3754 let action = language::proto::deserialize_code_action(
3755 envelope
3756 .payload
3757 .action
3758 .ok_or_else(|| anyhow!("invalid action"))?,
3759 )?;
3760 let apply_code_action = this.update(&mut cx, |this, cx| {
3761 let buffer = this
3762 .opened_buffers
3763 .get(&envelope.payload.buffer_id)
3764 .map(|buffer| buffer.upgrade(cx).unwrap())
3765 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3766 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3767 })?;
3768
3769 let project_transaction = apply_code_action.await?;
3770 let project_transaction = this.update(&mut cx, |this, cx| {
3771 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3772 });
3773 Ok(proto::ApplyCodeActionResponse {
3774 transaction: Some(project_transaction),
3775 })
3776 }
3777
3778 async fn handle_lsp_command<T: LspCommand>(
3779 this: ModelHandle<Self>,
3780 envelope: TypedEnvelope<T::ProtoRequest>,
3781 _: Arc<Client>,
3782 mut cx: AsyncAppContext,
3783 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3784 where
3785 <T::LspRequest as lsp::request::Request>::Result: Send,
3786 {
3787 let sender_id = envelope.original_sender_id()?;
3788 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3789 let buffer_handle = this.read_with(&cx, |this, _| {
3790 this.opened_buffers
3791 .get(&buffer_id)
3792 .and_then(|buffer| buffer.upgrade(&cx))
3793 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3794 })?;
3795 let request = T::from_proto(
3796 envelope.payload,
3797 this.clone(),
3798 buffer_handle.clone(),
3799 cx.clone(),
3800 )
3801 .await?;
3802 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3803 let response = this
3804 .update(&mut cx, |this, cx| {
3805 this.request_lsp(buffer_handle, request, cx)
3806 })
3807 .await?;
3808 this.update(&mut cx, |this, cx| {
3809 Ok(T::response_to_proto(
3810 response,
3811 this,
3812 sender_id,
3813 &buffer_version,
3814 cx,
3815 ))
3816 })
3817 }
3818
3819 async fn handle_get_project_symbols(
3820 this: ModelHandle<Self>,
3821 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3822 _: Arc<Client>,
3823 mut cx: AsyncAppContext,
3824 ) -> Result<proto::GetProjectSymbolsResponse> {
3825 let symbols = this
3826 .update(&mut cx, |this, cx| {
3827 this.symbols(&envelope.payload.query, cx)
3828 })
3829 .await?;
3830
3831 Ok(proto::GetProjectSymbolsResponse {
3832 symbols: symbols.iter().map(serialize_symbol).collect(),
3833 })
3834 }
3835
3836 async fn handle_search_project(
3837 this: ModelHandle<Self>,
3838 envelope: TypedEnvelope<proto::SearchProject>,
3839 _: Arc<Client>,
3840 mut cx: AsyncAppContext,
3841 ) -> Result<proto::SearchProjectResponse> {
3842 let peer_id = envelope.original_sender_id()?;
3843 let query = SearchQuery::from_proto(envelope.payload)?;
3844 let result = this
3845 .update(&mut cx, |this, cx| this.search(query, cx))
3846 .await?;
3847
3848 this.update(&mut cx, |this, cx| {
3849 let mut locations = Vec::new();
3850 for (buffer, ranges) in result {
3851 for range in ranges {
3852 let start = serialize_anchor(&range.start);
3853 let end = serialize_anchor(&range.end);
3854 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3855 locations.push(proto::Location {
3856 buffer: Some(buffer),
3857 start: Some(start),
3858 end: Some(end),
3859 });
3860 }
3861 }
3862 Ok(proto::SearchProjectResponse { locations })
3863 })
3864 }
3865
3866 async fn handle_open_buffer_for_symbol(
3867 this: ModelHandle<Self>,
3868 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3869 _: Arc<Client>,
3870 mut cx: AsyncAppContext,
3871 ) -> Result<proto::OpenBufferForSymbolResponse> {
3872 let peer_id = envelope.original_sender_id()?;
3873 let symbol = envelope
3874 .payload
3875 .symbol
3876 .ok_or_else(|| anyhow!("invalid symbol"))?;
3877 let symbol = this.read_with(&cx, |this, _| {
3878 let symbol = this.deserialize_symbol(symbol)?;
3879 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3880 if signature == symbol.signature {
3881 Ok(symbol)
3882 } else {
3883 Err(anyhow!("invalid symbol signature"))
3884 }
3885 })?;
3886 let buffer = this
3887 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3888 .await?;
3889
3890 Ok(proto::OpenBufferForSymbolResponse {
3891 buffer: Some(this.update(&mut cx, |this, cx| {
3892 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3893 })),
3894 })
3895 }
3896
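    // Produces a signature for a (worktree, path) pair using this project's
    // private nonce. Symbols sent to peers carry this signature, and
    // `handle_open_buffer_for_symbol` recomputes and checks it, so a peer can
    // only open paths that were previously reported to it as symbols.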
3897 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3898 let mut hasher = Sha256::new();
3899 hasher.update(worktree_id.to_proto().to_be_bytes());
3900 hasher.update(path.to_string_lossy().as_bytes());
3901 hasher.update(self.nonce.to_be_bytes());
3902 hasher.finalize().as_slice().try_into().unwrap()
3903 }
3904
3905 async fn handle_open_buffer_by_id(
3906 this: ModelHandle<Self>,
3907 envelope: TypedEnvelope<proto::OpenBufferById>,
3908 _: Arc<Client>,
3909 mut cx: AsyncAppContext,
3910 ) -> Result<proto::OpenBufferResponse> {
3911 let peer_id = envelope.original_sender_id()?;
3912 let buffer = this
3913 .update(&mut cx, |this, cx| {
3914 this.open_buffer_by_id(envelope.payload.id, cx)
3915 })
3916 .await?;
3917 this.update(&mut cx, |this, cx| {
3918 Ok(proto::OpenBufferResponse {
3919 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3920 })
3921 })
3922 }
3923
3924 async fn handle_open_buffer_by_path(
3925 this: ModelHandle<Self>,
3926 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3927 _: Arc<Client>,
3928 mut cx: AsyncAppContext,
3929 ) -> Result<proto::OpenBufferResponse> {
3930 let peer_id = envelope.original_sender_id()?;
3931 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3932 let open_buffer = this.update(&mut cx, |this, cx| {
3933 this.open_buffer(
3934 ProjectPath {
3935 worktree_id,
3936 path: PathBuf::from(envelope.payload.path).into(),
3937 },
3938 cx,
3939 )
3940 });
3941
3942 let buffer = open_buffer.await?;
3943 this.update(&mut cx, |this, cx| {
3944 Ok(proto::OpenBufferResponse {
3945 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3946 })
3947 })
3948 }
3949
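    // Converts a `ProjectTransaction` into its wire representation for a given
    // peer. Each affected buffer is serialized alongside its transaction; the
    // buffer itself is sent as full state the first time this peer sees it and
    // as a bare id afterwards (see `serialize_buffer_for_peer`).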
3950 fn serialize_project_transaction_for_peer(
3951 &mut self,
3952 project_transaction: ProjectTransaction,
3953 peer_id: PeerId,
3954 cx: &AppContext,
3955 ) -> proto::ProjectTransaction {
3956 let mut serialized_transaction = proto::ProjectTransaction {
3957 buffers: Default::default(),
3958 transactions: Default::default(),
3959 };
3960 for (buffer, transaction) in project_transaction.0 {
3961 serialized_transaction
3962 .buffers
3963 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3964 serialized_transaction
3965 .transactions
3966 .push(language::proto::serialize_transaction(&transaction));
3967 }
3968 serialized_transaction
3969 }
3970
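    // Inverse of `serialize_project_transaction_for_peer`: resolves each buffer
    // locally, waits for the transaction's edit operations to arrive, and
    // optionally pushes the transaction onto the buffer's undo history.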
3971 fn deserialize_project_transaction(
3972 &mut self,
3973 message: proto::ProjectTransaction,
3974 push_to_history: bool,
3975 cx: &mut ModelContext<Self>,
3976 ) -> Task<Result<ProjectTransaction>> {
3977 cx.spawn(|this, mut cx| async move {
3978 let mut project_transaction = ProjectTransaction::default();
3979 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3980 let buffer = this
3981 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3982 .await?;
3983 let transaction = language::proto::deserialize_transaction(transaction)?;
3984 project_transaction.0.insert(buffer, transaction);
3985 }
3986
3987 for (buffer, transaction) in &project_transaction.0 {
3988 buffer
3989 .update(&mut cx, |buffer, _| {
3990 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3991 })
3992 .await;
3993
3994 if push_to_history {
3995 buffer.update(&mut cx, |buffer, _| {
3996 buffer.push_transaction(transaction.clone(), Instant::now());
3997 });
3998 }
3999 }
4000
4001 Ok(project_transaction)
4002 })
4003 }
4004
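    // Serializes a buffer for transmission to a peer, tracking which buffers
    // each peer has already received. The full buffer state is sent only on
    // first contact; afterwards the remote id is enough for the peer to look
    // the buffer up (see `deserialize_buffer`).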
4005 fn serialize_buffer_for_peer(
4006 &mut self,
4007 buffer: &ModelHandle<Buffer>,
4008 peer_id: PeerId,
4009 cx: &AppContext,
4010 ) -> proto::Buffer {
4011 let buffer_id = buffer.read(cx).remote_id();
4012 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4013 if shared_buffers.insert(buffer_id) {
4014 proto::Buffer {
4015 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4016 }
4017 } else {
4018 proto::Buffer {
4019 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4020 }
4021 }
4022 }
4023
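    // Resolves an incoming `proto::Buffer` into a local buffer handle. An `Id`
    // variant waits on the `opened_buffer` watch channel until the referenced
    // buffer has been opened locally, while a `State` variant constructs a new
    // replica buffer from the serialized state and registers it with the
    // project.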
4024 fn deserialize_buffer(
4025 &mut self,
4026 buffer: proto::Buffer,
4027 cx: &mut ModelContext<Self>,
4028 ) -> Task<Result<ModelHandle<Buffer>>> {
4029 let replica_id = self.replica_id();
4030
4031 let opened_buffer_tx = self.opened_buffer.0.clone();
4032 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4033 cx.spawn(|this, mut cx| async move {
4034 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4035 proto::buffer::Variant::Id(id) => {
4036 let buffer = loop {
4037 let buffer = this.read_with(&cx, |this, cx| {
4038 this.opened_buffers
4039 .get(&id)
4040 .and_then(|buffer| buffer.upgrade(cx))
4041 });
4042 if let Some(buffer) = buffer {
4043 break buffer;
4044 }
4045 opened_buffer_rx
4046 .next()
4047 .await
4048 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4049 };
4050 Ok(buffer)
4051 }
4052 proto::buffer::Variant::State(mut buffer) => {
4053 let mut buffer_worktree = None;
4054 let mut buffer_file = None;
4055 if let Some(file) = buffer.file.take() {
4056 this.read_with(&cx, |this, cx| {
4057 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4058 let worktree =
4059 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4060 anyhow!("no worktree found for id {}", file.worktree_id)
4061 })?;
4062 buffer_file =
4063 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4064 as Box<dyn language::File>);
4065 buffer_worktree = Some(worktree);
4066 Ok::<_, anyhow::Error>(())
4067 })?;
4068 }
4069
4070 let buffer = cx.add_model(|cx| {
4071 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4072 });
4073
4074 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4075
4076 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4077 Ok(buffer)
4078 }
4079 }
4080 })
4081 }
4082
4083 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4084 let language = self
4085 .languages
4086 .get_language(&serialized_symbol.language_name);
4087 let start = serialized_symbol
4088 .start
4089 .ok_or_else(|| anyhow!("invalid start"))?;
4090 let end = serialized_symbol
4091 .end
4092 .ok_or_else(|| anyhow!("invalid end"))?;
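        // Assumes the proto `kind` field stores the same integer representation
        // as `lsp::SymbolKind`, mirroring the transmute in `serialize_symbol`.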
4093 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4094 Ok(Symbol {
4095 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4096 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4097 language_name: serialized_symbol.language_name.clone(),
4098 label: language
4099 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4100 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4101 name: serialized_symbol.name,
4102 path: PathBuf::from(serialized_symbol.path),
4103 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4104 kind,
4105 signature: serialized_symbol
4106 .signature
4107 .try_into()
4108 .map_err(|_| anyhow!("invalid signature"))?,
4109 })
4110 }
4111
4112 async fn handle_buffer_saved(
4113 this: ModelHandle<Self>,
4114 envelope: TypedEnvelope<proto::BufferSaved>,
4115 _: Arc<Client>,
4116 mut cx: AsyncAppContext,
4117 ) -> Result<()> {
4118 let version = deserialize_version(envelope.payload.version);
4119 let mtime = envelope
4120 .payload
4121 .mtime
4122 .ok_or_else(|| anyhow!("missing mtime"))?
4123 .into();
4124
4125 this.update(&mut cx, |this, cx| {
4126 let buffer = this
4127 .opened_buffers
4128 .get(&envelope.payload.buffer_id)
4129 .and_then(|buffer| buffer.upgrade(cx));
4130 if let Some(buffer) = buffer {
4131 buffer.update(cx, |buffer, cx| {
4132 buffer.did_save(version, mtime, None, cx);
4133 });
4134 }
4135 Ok(())
4136 })
4137 }
4138
4139 async fn handle_buffer_reloaded(
4140 this: ModelHandle<Self>,
4141 envelope: TypedEnvelope<proto::BufferReloaded>,
4142 _: Arc<Client>,
4143 mut cx: AsyncAppContext,
4144 ) -> Result<()> {
4145 let payload = envelope.payload.clone();
4146 let version = deserialize_version(payload.version);
4147 let mtime = payload
4148 .mtime
4149 .ok_or_else(|| anyhow!("missing mtime"))?
4150 .into();
4151 this.update(&mut cx, |this, cx| {
4152 let buffer = this
4153 .opened_buffers
4154 .get(&payload.buffer_id)
4155 .and_then(|buffer| buffer.upgrade(cx));
4156 if let Some(buffer) = buffer {
4157 buffer.update(cx, |buffer, cx| {
4158 buffer.did_reload(version, mtime, cx);
4159 });
4160 }
4161 Ok(())
4162 })
4163 }
4164
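    // Fuzzy-matches `query` against the paths of all visible worktrees.
    // Candidate sets are snapshotted up front so that the matching itself can
    // run on the background executor without touching app state.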
4165 pub fn match_paths<'a>(
4166 &self,
4167 query: &'a str,
4168 include_ignored: bool,
4169 smart_case: bool,
4170 max_results: usize,
4171 cancel_flag: &'a AtomicBool,
4172 cx: &AppContext,
4173 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4174 let worktrees = self
4175 .worktrees(cx)
4176 .filter(|worktree| worktree.read(cx).is_visible())
4177 .collect::<Vec<_>>();
4178 let include_root_name = worktrees.len() > 1;
4179 let candidate_sets = worktrees
4180 .into_iter()
4181 .map(|worktree| CandidateSet {
4182 snapshot: worktree.read(cx).snapshot(),
4183 include_ignored,
4184 include_root_name,
4185 })
4186 .collect::<Vec<_>>();
4187
4188 let background = cx.background().clone();
4189 async move {
4190 fuzzy::match_paths(
4191 candidate_sets.as_slice(),
4192 query,
4193 smart_case,
4194 max_results,
4195 cancel_flag,
4196 background,
4197 )
4198 .await
4199 }
4200 }
4201
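    // Converts a batch of LSP `TextEdit`s into edits expressed as anchor ranges
    // in the buffer, resolved against the snapshot corresponding to the
    // document version the language server was looking at. Adjacent or
    // newline-separated edits are merged, and multi-line replacements are
    // re-diffed against the old text so anchors in unchanged regions keep
    // their positions.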
4202 fn edits_from_lsp(
4203 &mut self,
4204 buffer: &ModelHandle<Buffer>,
4205 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4206 version: Option<i32>,
4207 cx: &mut ModelContext<Self>,
4208 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4209 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4210 cx.background().spawn(async move {
4211 let snapshot = snapshot?;
4212 let mut lsp_edits = lsp_edits
4213 .into_iter()
4214 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4215 .peekable();
4216
4217 let mut edits = Vec::new();
4218 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4219 // Combine any LSP edits that are adjacent.
4220 //
4221 // Also, combine LSP edits that are separated from each other by only
4222 // a newline. This is important because for some code actions,
4223 // Rust-analyzer rewrites the entire buffer via a series of edits that
4224 // are separated by unchanged newline characters.
4225 //
4226 // In order for the diffing logic below to work properly, any edits that
4227 // cancel each other out must be combined into one.
4228 while let Some((next_range, next_text)) = lsp_edits.peek() {
4229 if next_range.start > range.end {
4230 if next_range.start.row > range.end.row + 1
4231 || next_range.start.column > 0
4232 || snapshot.clip_point_utf16(
4233 PointUtf16::new(range.end.row, u32::MAX),
4234 Bias::Left,
4235 ) > range.end
4236 {
4237 break;
4238 }
4239 new_text.push('\n');
4240 }
4241 range.end = next_range.end;
4242 new_text.push_str(&next_text);
4243 lsp_edits.next();
4244 }
4245
4246 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4247 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4248 {
4249 return Err(anyhow!("invalid edits received from language server"));
4250 }
4251
4252 // For multiline edits, perform a diff of the old and new text so that
4253 // we can identify the changes more precisely, preserving the locations
4254 // of any anchors positioned in the unchanged regions.
4255 if range.end.row > range.start.row {
4256 let mut offset = range.start.to_offset(&snapshot);
4257 let old_text = snapshot.text_for_range(range).collect::<String>();
4258
4259 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4260 let mut moved_since_edit = true;
4261 for change in diff.iter_all_changes() {
4262 let tag = change.tag();
4263 let value = change.value();
4264 match tag {
4265 ChangeTag::Equal => {
4266 offset += value.len();
4267 moved_since_edit = true;
4268 }
4269 ChangeTag::Delete => {
4270 let start = snapshot.anchor_after(offset);
4271 let end = snapshot.anchor_before(offset + value.len());
4272 if moved_since_edit {
4273 edits.push((start..end, String::new()));
4274 } else {
4275 edits.last_mut().unwrap().0.end = end;
4276 }
4277 offset += value.len();
4278 moved_since_edit = false;
4279 }
4280 ChangeTag::Insert => {
4281 if moved_since_edit {
4282 let anchor = snapshot.anchor_after(offset);
4283 edits.push((anchor.clone()..anchor, value.to_string()));
4284 } else {
4285 edits.last_mut().unwrap().1.push_str(value);
4286 }
4287 moved_since_edit = false;
4288 }
4289 }
4290 }
4291 } else if range.end == range.start {
4292 let anchor = snapshot.anchor_after(range.start);
4293 edits.push((anchor.clone()..anchor, new_text));
4294 } else {
4295 let edit_start = snapshot.anchor_after(range.start);
4296 let edit_end = snapshot.anchor_before(range.end);
4297 edits.push((edit_start..edit_end, new_text));
4298 }
4299 }
4300
4301 Ok(edits)
4302 })
4303 }
4304
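    // Returns the text snapshot that corresponds to the given LSP document
    // version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    // older than the one requested. Without a version, the buffer's current
    // snapshot is used.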
4305 fn buffer_snapshot_for_lsp_version(
4306 &mut self,
4307 buffer: &ModelHandle<Buffer>,
4308 version: Option<i32>,
4309 cx: &AppContext,
4310 ) -> Result<TextBufferSnapshot> {
4311 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4312
4313 if let Some(version) = version {
4314 let buffer_id = buffer.read(cx).remote_id();
4315 let snapshots = self
4316 .buffer_snapshots
4317 .get_mut(&buffer_id)
4318 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4319 let mut found_snapshot = None;
4320 snapshots.retain(|(snapshot_version, snapshot)| {
4321 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4322 false
4323 } else {
4324 if *snapshot_version == version {
4325 found_snapshot = Some(snapshot.clone());
4326 }
4327 true
4328 }
4329 });
4330
4331 found_snapshot.ok_or_else(|| {
4332 anyhow!(
4333 "snapshot not found for buffer {} at version {}",
4334 buffer_id,
4335 version
4336 )
4337 })
4338 } else {
4339 Ok((buffer.read(cx)).text_snapshot())
4340 }
4341 }
4342
4343 fn language_server_for_buffer(
4344 &self,
4345 buffer: &Buffer,
4346 cx: &AppContext,
4347 ) -> Option<&Arc<LanguageServer>> {
4348 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4349 let worktree_id = file.worktree_id(cx);
4350 self.language_servers.get(&(worktree_id, language.name()))
4351 } else {
4352 None
4353 }
4354 }
4355}
4356
4357impl WorktreeHandle {
4358 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4359 match self {
4360 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4361 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4362 }
4363 }
4364}
4365
4366impl OpenBuffer {
4367 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4368 match self {
4369 OpenBuffer::Strong(handle) => Some(handle.clone()),
4370 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4371 OpenBuffer::Loading(_) => None,
4372 }
4373 }
4374}
4375
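// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet`
// interface so that `match_paths` can search several worktrees at once.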
4376struct CandidateSet {
4377 snapshot: Snapshot,
4378 include_ignored: bool,
4379 include_root_name: bool,
4380}
4381
4382impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4383 type Candidates = CandidateSetIter<'a>;
4384
4385 fn id(&self) -> usize {
4386 self.snapshot.id().to_usize()
4387 }
4388
4389 fn len(&self) -> usize {
4390 if self.include_ignored {
4391 self.snapshot.file_count()
4392 } else {
4393 self.snapshot.visible_file_count()
4394 }
4395 }
4396
4397 fn prefix(&self) -> Arc<str> {
4398 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4399 self.snapshot.root_name().into()
4400 } else if self.include_root_name {
4401 format!("{}/", self.snapshot.root_name()).into()
4402 } else {
4403 "".into()
4404 }
4405 }
4406
4407 fn candidates(&'a self, start: usize) -> Self::Candidates {
4408 CandidateSetIter {
4409 traversal: self.snapshot.files(self.include_ignored, start),
4410 }
4411 }
4412}
4413
4414struct CandidateSetIter<'a> {
4415 traversal: Traversal<'a>,
4416}
4417
4418impl<'a> Iterator for CandidateSetIter<'a> {
4419 type Item = PathMatchCandidate<'a>;
4420
4421 fn next(&mut self) -> Option<Self::Item> {
4422 self.traversal.next().map(|entry| {
4423 if let EntryKind::File(char_bag) = entry.kind {
4424 PathMatchCandidate {
4425 path: &entry.path,
4426 char_bag,
4427 }
4428 } else {
4429 unreachable!()
4430 }
4431 })
4432 }
4433}
4434
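// Dropping a project notifies the server (unregistering a local project or
// leaving a remote one); when the app quits, all running language servers are
// shut down first.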
4435impl Entity for Project {
4436 type Event = Event;
4437
4438 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4439 match &self.client_state {
4440 ProjectClientState::Local { remote_id_rx, .. } => {
4441 if let Some(project_id) = *remote_id_rx.borrow() {
4442 self.client
4443 .send(proto::UnregisterProject { project_id })
4444 .log_err();
4445 }
4446 }
4447 ProjectClientState::Remote { remote_id, .. } => {
4448 self.client
4449 .send(proto::LeaveProject {
4450 project_id: *remote_id,
4451 })
4452 .log_err();
4453 }
4454 }
4455 }
4456
4457 fn app_will_quit(
4458 &mut self,
4459 _: &mut MutableAppContext,
4460 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4461 let shutdown_futures = self
4462 .language_servers
4463 .drain()
4464 .filter_map(|(_, server)| server.shutdown())
4465 .collect::<Vec<_>>();
4466 Some(
4467 async move {
4468 futures::future::join_all(shutdown_futures).await;
4469 }
4470 .boxed(),
4471 )
4472 }
4473}
4474
4475impl Collaborator {
4476 fn from_proto(
4477 message: proto::Collaborator,
4478 user_store: &ModelHandle<UserStore>,
4479 cx: &mut AsyncAppContext,
4480 ) -> impl Future<Output = Result<Self>> {
4481 let user = user_store.update(cx, |user_store, cx| {
4482 user_store.fetch_user(message.user_id, cx)
4483 });
4484
4485 async move {
4486 Ok(Self {
4487 peer_id: PeerId(message.peer_id),
4488 user: user.await?,
4489 replica_id: message.replica_id as ReplicaId,
4490 })
4491 }
4492 }
4493}
4494
4495impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4496 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4497 Self {
4498 worktree_id,
4499 path: path.as_ref().into(),
4500 }
4501 }
4502}
4503
4504impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4505 fn from(options: lsp::CreateFileOptions) -> Self {
4506 Self {
4507 overwrite: options.overwrite.unwrap_or(false),
4508 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4509 }
4510 }
4511}
4512
4513impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4514 fn from(options: lsp::RenameFileOptions) -> Self {
4515 Self {
4516 overwrite: options.overwrite.unwrap_or(false),
4517 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4518 }
4519 }
4520}
4521
4522impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4523 fn from(options: lsp::DeleteFileOptions) -> Self {
4524 Self {
4525 recursive: options.recursive.unwrap_or(false),
4526 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4527 }
4528 }
4529}
4530
4531fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4532 proto::Symbol {
4533 source_worktree_id: symbol.source_worktree_id.to_proto(),
4534 worktree_id: symbol.worktree_id.to_proto(),
4535 language_name: symbol.language_name.clone(),
4536 name: symbol.name.clone(),
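        // Relies on `lsp::SymbolKind` sharing an integer representation with
        // the proto field; `deserialize_symbol` performs the inverse transmute.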
4537 kind: unsafe { mem::transmute(symbol.kind) },
4538 path: symbol.path.to_string_lossy().to_string(),
4539 start: Some(proto::Point {
4540 row: symbol.range.start.row,
4541 column: symbol.range.start.column,
4542 }),
4543 end: Some(proto::Point {
4544 row: symbol.range.end.row,
4545 column: symbol.range.end.column,
4546 }),
4547 signature: symbol.signature.to_vec(),
4548 }
4549}
4550
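// Computes `path` relative to `base` by walking their components in parallel,
// e.g. relativize_path("/a/b", "/a/c/d") yields "../c/d".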
4551fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4552 let mut path_components = path.components();
4553 let mut base_components = base.components();
4554 let mut components: Vec<Component> = Vec::new();
4555 loop {
4556 match (path_components.next(), base_components.next()) {
4557 (None, None) => break,
4558 (Some(a), None) => {
4559 components.push(a);
4560 components.extend(path_components.by_ref());
4561 break;
4562 }
4563 (None, _) => components.push(Component::ParentDir),
4564 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4565 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4566 (Some(a), Some(_)) => {
4567 components.push(Component::ParentDir);
4568 for _ in base_components {
4569 components.push(Component::ParentDir);
4570 }
4571 components.push(a);
4572 components.extend(path_components.by_ref());
4573 break;
4574 }
4575 }
4576 }
4577 components.iter().map(|c| c.as_os_str()).collect()
4578}
4579
4580impl Item for Buffer {
4581 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4582 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4583 }
4584}
4585
4586#[cfg(test)]
4587mod tests {
4588 use super::{Event, *};
4589 use fs::RealFs;
4590 use futures::StreamExt;
4591 use gpui::test::subscribe;
4592 use language::{
4593 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4594 ToPoint,
4595 };
4596 use lsp::Url;
4597 use serde_json::json;
4598 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4599 use unindent::Unindent as _;
4600 use util::test::temp_tree;
4601 use worktree::WorktreeHandle as _;
4602
4603 #[gpui::test]
4604 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4605 let dir = temp_tree(json!({
4606 "root": {
4607 "apple": "",
4608 "banana": {
4609 "carrot": {
4610 "date": "",
4611 "endive": "",
4612 }
4613 },
4614 "fennel": {
4615 "grape": "",
4616 }
4617 }
4618 }));
4619
4620 let root_link_path = dir.path().join("root_link");
4621 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4622 unix::fs::symlink(
4623 &dir.path().join("root/fennel"),
4624 &dir.path().join("root/finnochio"),
4625 )
4626 .unwrap();
4627
4628 let project = Project::test(Arc::new(RealFs), cx);
4629
4630 let (tree, _) = project
4631 .update(cx, |project, cx| {
4632 project.find_or_create_local_worktree(&root_link_path, true, cx)
4633 })
4634 .await
4635 .unwrap();
4636
4637 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4638 .await;
4639 cx.read(|cx| {
4640 let tree = tree.read(cx);
4641 assert_eq!(tree.file_count(), 5);
4642 assert_eq!(
4643 tree.inode_for_path("fennel/grape"),
4644 tree.inode_for_path("finnochio/grape")
4645 );
4646 });
4647
4648 let cancel_flag = Default::default();
4649 let results = project
4650 .read_with(cx, |project, cx| {
4651 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4652 })
4653 .await;
4654 assert_eq!(
4655 results
4656 .into_iter()
4657 .map(|result| result.path)
4658 .collect::<Vec<Arc<Path>>>(),
4659 vec![
4660 PathBuf::from("banana/carrot/date").into(),
4661 PathBuf::from("banana/carrot/endive").into(),
4662 ]
4663 );
4664 }
4665
4666 #[gpui::test]
4667 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4668 cx.foreground().forbid_parking();
4669
4670 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4671 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4672 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4673 completion_provider: Some(lsp::CompletionOptions {
4674 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4675 ..Default::default()
4676 }),
4677 ..Default::default()
4678 });
4679 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4680 completion_provider: Some(lsp::CompletionOptions {
4681 trigger_characters: Some(vec![":".to_string()]),
4682 ..Default::default()
4683 }),
4684 ..Default::default()
4685 });
4686
4687 let rust_language = Arc::new(Language::new(
4688 LanguageConfig {
4689 name: "Rust".into(),
4690 path_suffixes: vec!["rs".to_string()],
4691 language_server: Some(rust_lsp_config),
4692 ..Default::default()
4693 },
4694 Some(tree_sitter_rust::language()),
4695 ));
4696 let json_language = Arc::new(Language::new(
4697 LanguageConfig {
4698 name: "JSON".into(),
4699 path_suffixes: vec!["json".to_string()],
4700 language_server: Some(json_lsp_config),
4701 ..Default::default()
4702 },
4703 None,
4704 ));
4705
4706 let fs = FakeFs::new(cx.background());
4707 fs.insert_tree(
4708 "/the-root",
4709 json!({
4710 "test.rs": "const A: i32 = 1;",
4711 "test2.rs": "",
4712 "Cargo.toml": "a = 1",
4713 "package.json": "{\"a\": 1}",
4714 }),
4715 )
4716 .await;
4717
4718 let project = Project::test(fs, cx);
4719 project.update(cx, |project, _| {
4720 project.languages.add(rust_language);
4721 project.languages.add(json_language);
4722 });
4723
4724 let worktree_id = project
4725 .update(cx, |project, cx| {
4726 project.find_or_create_local_worktree("/the-root", true, cx)
4727 })
4728 .await
4729 .unwrap()
4730 .0
4731 .read_with(cx, |tree, _| tree.id());
4732
4733 // Open a buffer without an associated language server.
4734 let toml_buffer = project
4735 .update(cx, |project, cx| {
4736 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4737 })
4738 .await
4739 .unwrap();
4740
4741 // Open a buffer with an associated language server.
4742 let rust_buffer = project
4743 .update(cx, |project, cx| {
4744 project.open_buffer((worktree_id, "test.rs"), cx)
4745 })
4746 .await
4747 .unwrap();
4748
4749 // A server is started up, and it is notified about Rust files.
4750 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4751 assert_eq!(
4752 fake_rust_server
4753 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4754 .await
4755 .text_document,
4756 lsp::TextDocumentItem {
4757 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4758 version: 0,
4759 text: "const A: i32 = 1;".to_string(),
4760 language_id: Default::default()
4761 }
4762 );
4763
4764 // The buffer is configured based on the language server's capabilities.
4765 rust_buffer.read_with(cx, |buffer, _| {
4766 assert_eq!(
4767 buffer.completion_triggers(),
4768 &[".".to_string(), "::".to_string()]
4769 );
4770 });
4771 toml_buffer.read_with(cx, |buffer, _| {
4772 assert!(buffer.completion_triggers().is_empty());
4773 });
4774
4775 // Edit a buffer. The changes are reported to the language server.
4776 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4777 assert_eq!(
4778 fake_rust_server
4779 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4780 .await
4781 .text_document,
4782 lsp::VersionedTextDocumentIdentifier::new(
4783 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4784 1
4785 )
4786 );
4787
4788 // Open a third buffer with a different associated language server.
4789 let json_buffer = project
4790 .update(cx, |project, cx| {
4791 project.open_buffer((worktree_id, "package.json"), cx)
4792 })
4793 .await
4794 .unwrap();
4795
        // Another language server is started up, and it is notified about the
        // newly-opened JSON buffer.
4798 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4799 assert_eq!(
4800 fake_json_server
4801 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4802 .await
4803 .text_document,
4804 lsp::TextDocumentItem {
4805 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4806 version: 0,
4807 text: "{\"a\": 1}".to_string(),
4808 language_id: Default::default()
4809 }
4810 );
4811
4812 // This buffer is configured based on the second language server's
4813 // capabilities.
4814 json_buffer.read_with(cx, |buffer, _| {
4815 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4816 });
4817
4818 // When opening another buffer whose language server is already running,
4819 // it is also configured based on the existing language server's capabilities.
4820 let rust_buffer2 = project
4821 .update(cx, |project, cx| {
4822 project.open_buffer((worktree_id, "test2.rs"), cx)
4823 })
4824 .await
4825 .unwrap();
4826 rust_buffer2.read_with(cx, |buffer, _| {
4827 assert_eq!(
4828 buffer.completion_triggers(),
4829 &[".".to_string(), "::".to_string()]
4830 );
4831 });
4832
4833 // Changes are reported only to servers matching the buffer's language.
4834 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4835 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4836 assert_eq!(
4837 fake_rust_server
4838 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4839 .await
4840 .text_document,
4841 lsp::VersionedTextDocumentIdentifier::new(
4842 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4843 1
4844 )
4845 );
4846
4847 // Save notifications are reported to all servers.
4848 toml_buffer
4849 .update(cx, |buffer, cx| buffer.save(cx))
4850 .await
4851 .unwrap();
4852 assert_eq!(
4853 fake_rust_server
4854 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4855 .await
4856 .text_document,
4857 lsp::TextDocumentIdentifier::new(
4858 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4859 )
4860 );
4861 assert_eq!(
4862 fake_json_server
4863 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4864 .await
4865 .text_document,
4866 lsp::TextDocumentIdentifier::new(
4867 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4868 )
4869 );
4870
4871 // Close notifications are reported only to servers matching the buffer's language.
4872 cx.update(|_| drop(json_buffer));
4873 let close_message = lsp::DidCloseTextDocumentParams {
4874 text_document: lsp::TextDocumentIdentifier::new(
4875 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4876 ),
4877 };
4878 assert_eq!(
4879 fake_json_server
4880 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4881 .await,
4882 close_message,
4883 );
4884 }
4885
4886 #[gpui::test]
4887 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4888 cx.foreground().forbid_parking();
4889
4890 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4891 let progress_token = language_server_config
4892 .disk_based_diagnostics_progress_token
4893 .clone()
4894 .unwrap();
4895
4896 let language = Arc::new(Language::new(
4897 LanguageConfig {
4898 name: "Rust".into(),
4899 path_suffixes: vec!["rs".to_string()],
4900 language_server: Some(language_server_config),
4901 ..Default::default()
4902 },
4903 Some(tree_sitter_rust::language()),
4904 ));
4905
4906 let fs = FakeFs::new(cx.background());
4907 fs.insert_tree(
4908 "/dir",
4909 json!({
4910 "a.rs": "fn a() { A }",
4911 "b.rs": "const y: i32 = 1",
4912 }),
4913 )
4914 .await;
4915
4916 let project = Project::test(fs, cx);
4917 project.update(cx, |project, _| project.languages.add(language));
4918
4919 let (tree, _) = project
4920 .update(cx, |project, cx| {
4921 project.find_or_create_local_worktree("/dir", true, cx)
4922 })
4923 .await
4924 .unwrap();
4925 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4926
4927 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4928 .await;
4929
4930 // Cause worktree to start the fake language server
4931 let _buffer = project
4932 .update(cx, |project, cx| {
4933 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4934 })
4935 .await
4936 .unwrap();
4937
4938 let mut events = subscribe(&project, cx);
4939
4940 let mut fake_server = fake_servers.next().await.unwrap();
4941 fake_server.start_progress(&progress_token).await;
4942 assert_eq!(
4943 events.next().await.unwrap(),
4944 Event::DiskBasedDiagnosticsStarted
4945 );
4946
4947 fake_server.start_progress(&progress_token).await;
4948 fake_server.end_progress(&progress_token).await;
4949 fake_server.start_progress(&progress_token).await;
4950
4951 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4952 lsp::PublishDiagnosticsParams {
4953 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4954 version: None,
4955 diagnostics: vec![lsp::Diagnostic {
4956 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4957 severity: Some(lsp::DiagnosticSeverity::ERROR),
4958 message: "undefined variable 'A'".to_string(),
4959 ..Default::default()
4960 }],
4961 },
4962 );
4963 assert_eq!(
4964 events.next().await.unwrap(),
4965 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4966 );
4967
4968 fake_server.end_progress(&progress_token).await;
4969 fake_server.end_progress(&progress_token).await;
4970 assert_eq!(
4971 events.next().await.unwrap(),
4972 Event::DiskBasedDiagnosticsUpdated
4973 );
4974 assert_eq!(
4975 events.next().await.unwrap(),
4976 Event::DiskBasedDiagnosticsFinished
4977 );
4978
4979 let buffer = project
4980 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4981 .await
4982 .unwrap();
4983
4984 buffer.read_with(cx, |buffer, _| {
4985 let snapshot = buffer.snapshot();
4986 let diagnostics = snapshot
4987 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4988 .collect::<Vec<_>>();
4989 assert_eq!(
4990 diagnostics,
4991 &[DiagnosticEntry {
4992 range: Point::new(0, 9)..Point::new(0, 10),
4993 diagnostic: Diagnostic {
4994 severity: lsp::DiagnosticSeverity::ERROR,
4995 message: "undefined variable 'A'".to_string(),
4996 group_id: 0,
4997 is_primary: true,
4998 ..Default::default()
4999 }
5000 }]
5001 )
5002 });
5003 }
5004
5005 #[gpui::test]
5006 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5007 cx.foreground().forbid_parking();
5008
5009 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5010 lsp_config
5011 .disk_based_diagnostic_sources
5012 .insert("disk".to_string());
5013 let language = Arc::new(Language::new(
5014 LanguageConfig {
5015 name: "Rust".into(),
5016 path_suffixes: vec!["rs".to_string()],
5017 language_server: Some(lsp_config),
5018 ..Default::default()
5019 },
5020 Some(tree_sitter_rust::language()),
5021 ));
5022
5023 let text = "
5024 fn a() { A }
5025 fn b() { BB }
5026 fn c() { CCC }
5027 "
5028 .unindent();
5029
5030 let fs = FakeFs::new(cx.background());
5031 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5032
5033 let project = Project::test(fs, cx);
5034 project.update(cx, |project, _| project.languages.add(language));
5035
5036 let worktree_id = project
5037 .update(cx, |project, cx| {
5038 project.find_or_create_local_worktree("/dir", true, cx)
5039 })
5040 .await
5041 .unwrap()
5042 .0
5043 .read_with(cx, |tree, _| tree.id());
5044
5045 let buffer = project
5046 .update(cx, |project, cx| {
5047 project.open_buffer((worktree_id, "a.rs"), cx)
5048 })
5049 .await
5050 .unwrap();
5051
5052 let mut fake_server = fake_servers.next().await.unwrap();
5053 let open_notification = fake_server
5054 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5055 .await;
5056
5057 // Edit the buffer, moving the content down
5058 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5059 let change_notification_1 = fake_server
5060 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5061 .await;
5062 assert!(
5063 change_notification_1.text_document.version > open_notification.text_document.version
5064 );
5065
5066 // Report some diagnostics for the initial version of the buffer
5067 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5068 lsp::PublishDiagnosticsParams {
5069 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5070 version: Some(open_notification.text_document.version),
5071 diagnostics: vec![
5072 lsp::Diagnostic {
5073 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5074 severity: Some(DiagnosticSeverity::ERROR),
5075 message: "undefined variable 'A'".to_string(),
5076 source: Some("disk".to_string()),
5077 ..Default::default()
5078 },
5079 lsp::Diagnostic {
5080 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5081 severity: Some(DiagnosticSeverity::ERROR),
5082 message: "undefined variable 'BB'".to_string(),
5083 source: Some("disk".to_string()),
5084 ..Default::default()
5085 },
5086 lsp::Diagnostic {
5087 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5088 severity: Some(DiagnosticSeverity::ERROR),
5089 source: Some("disk".to_string()),
5090 message: "undefined variable 'CCC'".to_string(),
5091 ..Default::default()
5092 },
5093 ],
5094 },
5095 );
5096
5097 // The diagnostics have moved down since they were created.
5098 buffer.next_notification(cx).await;
5099 buffer.read_with(cx, |buffer, _| {
5100 assert_eq!(
5101 buffer
5102 .snapshot()
5103 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5104 .collect::<Vec<_>>(),
5105 &[
5106 DiagnosticEntry {
5107 range: Point::new(3, 9)..Point::new(3, 11),
5108 diagnostic: Diagnostic {
5109 severity: DiagnosticSeverity::ERROR,
5110 message: "undefined variable 'BB'".to_string(),
5111 is_disk_based: true,
5112 group_id: 1,
5113 is_primary: true,
5114 ..Default::default()
5115 },
5116 },
5117 DiagnosticEntry {
5118 range: Point::new(4, 9)..Point::new(4, 12),
5119 diagnostic: Diagnostic {
5120 severity: DiagnosticSeverity::ERROR,
5121 message: "undefined variable 'CCC'".to_string(),
5122 is_disk_based: true,
5123 group_id: 2,
5124 is_primary: true,
5125 ..Default::default()
5126 }
5127 }
5128 ]
5129 );
5130 assert_eq!(
5131 chunks_with_diagnostics(buffer, 0..buffer.len()),
5132 [
5133 ("\n\nfn a() { ".to_string(), None),
5134 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5135 (" }\nfn b() { ".to_string(), None),
5136 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5137 (" }\nfn c() { ".to_string(), None),
5138 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5139 (" }\n".to_string(), None),
5140 ]
5141 );
5142 assert_eq!(
5143 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5144 [
5145 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5146 (" }\nfn c() { ".to_string(), None),
5147 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5148 ]
5149 );
5150 });
5151
5152 // Ensure overlapping diagnostics are highlighted correctly.
5153 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5154 lsp::PublishDiagnosticsParams {
5155 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5156 version: Some(open_notification.text_document.version),
5157 diagnostics: vec![
5158 lsp::Diagnostic {
5159 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5160 severity: Some(DiagnosticSeverity::ERROR),
5161 message: "undefined variable 'A'".to_string(),
5162 source: Some("disk".to_string()),
5163 ..Default::default()
5164 },
5165 lsp::Diagnostic {
5166 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5167 severity: Some(DiagnosticSeverity::WARNING),
5168 message: "unreachable statement".to_string(),
5169 source: Some("disk".to_string()),
5170 ..Default::default()
5171 },
5172 ],
5173 },
5174 );
5175
5176 buffer.next_notification(cx).await;
5177 buffer.read_with(cx, |buffer, _| {
5178 assert_eq!(
5179 buffer
5180 .snapshot()
5181 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5182 .collect::<Vec<_>>(),
5183 &[
5184 DiagnosticEntry {
5185 range: Point::new(2, 9)..Point::new(2, 12),
5186 diagnostic: Diagnostic {
5187 severity: DiagnosticSeverity::WARNING,
5188 message: "unreachable statement".to_string(),
5189 is_disk_based: true,
5190 group_id: 1,
5191 is_primary: true,
5192 ..Default::default()
5193 }
5194 },
5195 DiagnosticEntry {
5196 range: Point::new(2, 9)..Point::new(2, 10),
5197 diagnostic: Diagnostic {
5198 severity: DiagnosticSeverity::ERROR,
5199 message: "undefined variable 'A'".to_string(),
5200 is_disk_based: true,
5201 group_id: 0,
5202 is_primary: true,
5203 ..Default::default()
5204 },
5205 }
5206 ]
5207 );
5208 assert_eq!(
5209 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5210 [
5211 ("fn a() { ".to_string(), None),
5212 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5213 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5214 ("\n".to_string(), None),
5215 ]
5216 );
5217 assert_eq!(
5218 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5219 [
5220 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5221 ("\n".to_string(), None),
5222 ]
5223 );
5224 });
5225
5226 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5227 // changes since the last save.
5228 buffer.update(cx, |buffer, cx| {
5229 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5230 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5231 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5232 });
5233 let change_notification_2 = fake_server
5234 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5235 .await;
5236 assert!(
5237 change_notification_2.text_document.version
5238 > change_notification_1.text_document.version
5239 );
5240
5241 // Handle out-of-order diagnostics
5242 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5243 lsp::PublishDiagnosticsParams {
5244 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5245 version: Some(change_notification_2.text_document.version),
5246 diagnostics: vec![
5247 lsp::Diagnostic {
5248 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5249 severity: Some(DiagnosticSeverity::ERROR),
5250 message: "undefined variable 'BB'".to_string(),
5251 source: Some("disk".to_string()),
5252 ..Default::default()
5253 },
5254 lsp::Diagnostic {
5255 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5256 severity: Some(DiagnosticSeverity::WARNING),
5257 message: "undefined variable 'A'".to_string(),
5258 source: Some("disk".to_string()),
5259 ..Default::default()
5260 },
5261 ],
5262 },
5263 );
5264
5265 buffer.next_notification(cx).await;
5266 buffer.read_with(cx, |buffer, _| {
5267 assert_eq!(
5268 buffer
5269 .snapshot()
5270 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5271 .collect::<Vec<_>>(),
5272 &[
5273 DiagnosticEntry {
5274 range: Point::new(2, 21)..Point::new(2, 22),
5275 diagnostic: Diagnostic {
5276 severity: DiagnosticSeverity::WARNING,
5277 message: "undefined variable 'A'".to_string(),
5278 is_disk_based: true,
5279 group_id: 1,
5280 is_primary: true,
5281 ..Default::default()
5282 }
5283 },
5284 DiagnosticEntry {
5285 range: Point::new(3, 9)..Point::new(3, 14),
5286 diagnostic: Diagnostic {
5287 severity: DiagnosticSeverity::ERROR,
5288 message: "undefined variable 'BB'".to_string(),
5289 is_disk_based: true,
5290 group_id: 0,
5291 is_primary: true,
5292 ..Default::default()
5293 },
5294 }
5295 ]
5296 );
5297 });
5298 }
5299
5300 #[gpui::test]
5301 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5302 cx.foreground().forbid_parking();
5303
5304 let text = concat!(
5305 "let one = ;\n", //
5306 "let two = \n",
5307 "let three = 3;\n",
5308 );
5309
5310 let fs = FakeFs::new(cx.background());
5311 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5312
5313 let project = Project::test(fs, cx);
5314 let worktree_id = project
5315 .update(cx, |project, cx| {
5316 project.find_or_create_local_worktree("/dir", true, cx)
5317 })
5318 .await
5319 .unwrap()
5320 .0
5321 .read_with(cx, |tree, _| tree.id());
5322
5323 let buffer = project
5324 .update(cx, |project, cx| {
5325 project.open_buffer((worktree_id, "a.rs"), cx)
5326 })
5327 .await
5328 .unwrap();
5329
5330 project.update(cx, |project, cx| {
5331 project
5332 .update_buffer_diagnostics(
5333 &buffer,
5334 vec![
5335 DiagnosticEntry {
5336 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5337 diagnostic: Diagnostic {
5338 severity: DiagnosticSeverity::ERROR,
5339 message: "syntax error 1".to_string(),
5340 ..Default::default()
5341 },
5342 },
5343 DiagnosticEntry {
5344 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5345 diagnostic: Diagnostic {
5346 severity: DiagnosticSeverity::ERROR,
5347 message: "syntax error 2".to_string(),
5348 ..Default::default()
5349 },
5350 },
5351 ],
5352 None,
5353 cx,
5354 )
5355 .unwrap();
5356 });
5357
5358 // An empty range is extended forward to include the following character.
5359 // At the end of a line, an empty range is extended backward to include
5360 // the preceding character.
5361 buffer.read_with(cx, |buffer, _| {
5362 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5363 assert_eq!(
5364 chunks
5365 .iter()
5366 .map(|(s, d)| (s.as_str(), *d))
5367 .collect::<Vec<_>>(),
5368 &[
5369 ("let one = ", None),
5370 (";", Some(DiagnosticSeverity::ERROR)),
5371 ("\nlet two =", None),
5372 (" ", Some(DiagnosticSeverity::ERROR)),
5373 ("\nlet three = 3;\n", None)
5374 ]
5375 );
5376 });
5377 }
5378
5379 #[gpui::test]
5380 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5381 cx.foreground().forbid_parking();
5382
5383 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5384 let language = Arc::new(Language::new(
5385 LanguageConfig {
5386 name: "Rust".into(),
5387 path_suffixes: vec!["rs".to_string()],
5388 language_server: Some(lsp_config),
5389 ..Default::default()
5390 },
5391 Some(tree_sitter_rust::language()),
5392 ));
5393
5394 let text = "
5395 fn a() {
5396 f1();
5397 }
5398 fn b() {
5399 f2();
5400 }
5401 fn c() {
5402 f3();
5403 }
5404 "
5405 .unindent();
5406
5407 let fs = FakeFs::new(cx.background());
5408 fs.insert_tree(
5409 "/dir",
5410 json!({
5411 "a.rs": text.clone(),
5412 }),
5413 )
5414 .await;
5415
5416 let project = Project::test(fs, cx);
5417 project.update(cx, |project, _| project.languages.add(language));
5418
5419 let worktree_id = project
5420 .update(cx, |project, cx| {
5421 project.find_or_create_local_worktree("/dir", true, cx)
5422 })
5423 .await
5424 .unwrap()
5425 .0
5426 .read_with(cx, |tree, _| tree.id());
5427
5428 let buffer = project
5429 .update(cx, |project, cx| {
5430 project.open_buffer((worktree_id, "a.rs"), cx)
5431 })
5432 .await
5433 .unwrap();
5434
5435 let mut fake_server = fake_servers.next().await.unwrap();
5436 let lsp_document_version = fake_server
5437 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5438 .await
5439 .text_document
5440 .version;
5441
5442 // Simulate editing the buffer after the language server computes some edits.
5443 buffer.update(cx, |buffer, cx| {
5444 buffer.edit(
5445 [Point::new(0, 0)..Point::new(0, 0)],
5446 "// above first function\n",
5447 cx,
5448 );
5449 buffer.edit(
5450 [Point::new(2, 0)..Point::new(2, 0)],
5451 " // inside first function\n",
5452 cx,
5453 );
5454 buffer.edit(
5455 [Point::new(6, 4)..Point::new(6, 4)],
5456 "// inside second function ",
5457 cx,
5458 );
5459
5460 assert_eq!(
5461 buffer.text(),
5462 "
5463 // above first function
5464 fn a() {
5465 // inside first function
5466 f1();
5467 }
5468 fn b() {
5469 // inside second function f2();
5470 }
5471 fn c() {
5472 f3();
5473 }
5474 "
5475 .unindent()
5476 );
5477 });
5478
5479 let edits = project
5480 .update(cx, |project, cx| {
5481 project.edits_from_lsp(
5482 &buffer,
5483 vec![
5484 // replace body of first function
5485 lsp::TextEdit {
5486 range: lsp::Range::new(
5487 lsp::Position::new(0, 0),
5488 lsp::Position::new(3, 0),
5489 ),
5490 new_text: "
5491 fn a() {
5492 f10();
5493 }
5494 "
5495 .unindent(),
5496 },
5497 // edit inside second function
5498 lsp::TextEdit {
5499 range: lsp::Range::new(
5500 lsp::Position::new(4, 6),
5501 lsp::Position::new(4, 6),
5502 ),
5503 new_text: "00".into(),
5504 },
5505 // edit inside third function via two distinct edits
5506 lsp::TextEdit {
5507 range: lsp::Range::new(
5508 lsp::Position::new(7, 5),
5509 lsp::Position::new(7, 5),
5510 ),
5511 new_text: "4000".into(),
5512 },
5513 lsp::TextEdit {
5514 range: lsp::Range::new(
5515 lsp::Position::new(7, 5),
5516 lsp::Position::new(7, 6),
5517 ),
5518 new_text: "".into(),
5519 },
5520 ],
5521 Some(lsp_document_version),
5522 cx,
5523 )
5524 })
5525 .await
5526 .unwrap();
5527
5528 buffer.update(cx, |buffer, cx| {
5529 for (range, new_text) in edits {
5530 buffer.edit([range], new_text, cx);
5531 }
5532 assert_eq!(
5533 buffer.text(),
5534 "
5535 // above first function
5536 fn a() {
5537 // inside first function
5538 f10();
5539 }
5540 fn b() {
5541 // inside second function f200();
5542 }
5543 fn c() {
5544 f4000();
5545 }
5546 "
5547 .unindent()
5548 );
5549 });
5550 }
5551
5552 #[gpui::test]
5553 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5554 cx.foreground().forbid_parking();
5555
5556 let text = "
5557 use a::b;
5558 use a::c;
5559
5560 fn f() {
5561 b();
5562 c();
5563 }
5564 "
5565 .unindent();
5566
5567 let fs = FakeFs::new(cx.background());
5568 fs.insert_tree(
5569 "/dir",
5570 json!({
5571 "a.rs": text.clone(),
5572 }),
5573 )
5574 .await;
5575
5576 let project = Project::test(fs, cx);
5577 let worktree_id = project
5578 .update(cx, |project, cx| {
5579 project.find_or_create_local_worktree("/dir", true, cx)
5580 })
5581 .await
5582 .unwrap()
5583 .0
5584 .read_with(cx, |tree, _| tree.id());
5585
5586 let buffer = project
5587 .update(cx, |project, cx| {
5588 project.open_buffer((worktree_id, "a.rs"), cx)
5589 })
5590 .await
5591 .unwrap();
5592
5593 // Simulate the language server sending us a small edit in the form of a very large diff.
5594 // Rust-analyzer does this when performing a merge-imports code action.
5595 let edits = project
5596 .update(cx, |project, cx| {
5597 project.edits_from_lsp(
5598 &buffer,
5599 [
5600 // Replace the first use statement without editing the semicolon.
5601 lsp::TextEdit {
5602 range: lsp::Range::new(
5603 lsp::Position::new(0, 4),
5604 lsp::Position::new(0, 8),
5605 ),
5606 new_text: "a::{b, c}".into(),
5607 },
5608 // Reinsert the remainder of the file between the semicolon and the final
5609 // newline of the file.
5610 lsp::TextEdit {
5611 range: lsp::Range::new(
5612 lsp::Position::new(0, 9),
5613 lsp::Position::new(0, 9),
5614 ),
5615 new_text: "\n\n".into(),
5616 },
5617 lsp::TextEdit {
5618 range: lsp::Range::new(
5619 lsp::Position::new(0, 9),
5620 lsp::Position::new(0, 9),
5621 ),
5622 new_text: "
5623 fn f() {
5624 b();
5625 c();
5626 }"
5627 .unindent(),
5628 },
5629 // Delete everything after the first newline of the file.
5630 lsp::TextEdit {
5631 range: lsp::Range::new(
5632 lsp::Position::new(1, 0),
5633 lsp::Position::new(7, 0),
5634 ),
5635 new_text: "".into(),
5636 },
5637 ],
5638 None,
5639 cx,
5640 )
5641 })
5642 .await
5643 .unwrap();
5644
5645 buffer.update(cx, |buffer, cx| {
5646 let edits = edits
5647 .into_iter()
5648 .map(|(range, text)| {
5649 (
5650 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5651 text,
5652 )
5653 })
5654 .collect::<Vec<_>>();
5655
5656 assert_eq!(
5657 edits,
5658 [
5659 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5660 (Point::new(1, 0)..Point::new(2, 0), "".into())
5661 ]
5662 );
5663
5664 for (range, new_text) in edits {
5665 buffer.edit([range], new_text, cx);
5666 }
5667 assert_eq!(
5668 buffer.text(),
5669 "
5670 use a::{b, c};
5671
5672 fn f() {
5673 b();
5674 c();
5675 }
5676 "
5677 .unindent()
5678 );
5679 });
5680 }
5681
5682 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5683 buffer: &Buffer,
5684 range: Range<T>,
5685 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5686 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5687 for chunk in buffer.snapshot().chunks(range, true) {
5688 if chunks.last().map_or(false, |prev_chunk| {
5689 prev_chunk.1 == chunk.diagnostic_severity
5690 }) {
5691 chunks.last_mut().unwrap().0.push_str(chunk.text);
5692 } else {
5693 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5694 }
5695 }
5696 chunks
5697 }
5698
5699 #[gpui::test]
5700 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5701 let dir = temp_tree(json!({
5702 "root": {
5703 "dir1": {},
5704 "dir2": {
5705 "dir3": {}
5706 }
5707 }
5708 }));
5709
5710 let project = Project::test(Arc::new(RealFs), cx);
5711 let (tree, _) = project
5712 .update(cx, |project, cx| {
5713 project.find_or_create_local_worktree(&dir.path(), true, cx)
5714 })
5715 .await
5716 .unwrap();
5717
5718 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5719 .await;
5720
5721 let cancel_flag = Default::default();
5722 let results = project
5723 .read_with(cx, |project, cx| {
5724 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5725 })
5726 .await;
5727
5728 assert!(results.is_empty());
5729 }
5730
5731 #[gpui::test]
5732 async fn test_definition(cx: &mut gpui::TestAppContext) {
5733 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5734 let language = Arc::new(Language::new(
5735 LanguageConfig {
5736 name: "Rust".into(),
5737 path_suffixes: vec!["rs".to_string()],
5738 language_server: Some(language_server_config),
5739 ..Default::default()
5740 },
5741 Some(tree_sitter_rust::language()),
5742 ));
5743
5744 let fs = FakeFs::new(cx.background());
5745 fs.insert_tree(
5746 "/dir",
5747 json!({
5748 "a.rs": "const fn a() { A }",
5749 "b.rs": "const y: i32 = crate::a()",
5750 }),
5751 )
5752 .await;
5753
5754 let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));
5758
5759 let (tree, _) = project
5760 .update(cx, |project, cx| {
5761 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5762 })
5763 .await
5764 .unwrap();
5765 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5766 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5767 .await;
5768
5769 let buffer = project
5770 .update(cx, |project, cx| {
5771 project.open_buffer(
5772 ProjectPath {
5773 worktree_id,
5774 path: Path::new("").into(),
5775 },
5776 cx,
5777 )
5778 })
5779 .await
5780 .unwrap();
5781
5782 let mut fake_server = fake_servers.next().await.unwrap();
5783 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5784 let params = params.text_document_position_params;
5785 assert_eq!(
5786 params.text_document.uri.to_file_path().unwrap(),
5787 Path::new("/dir/b.rs"),
5788 );
5789 assert_eq!(params.position, lsp::Position::new(0, 22));
5790
5791 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5792 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5793 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5794 )))
5795 });
5796
5797 let mut definitions = project
5798 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5799 .await
5800 .unwrap();
5801
5802 assert_eq!(definitions.len(), 1);
5803 let definition = definitions.pop().unwrap();
5804 cx.update(|cx| {
5805 let target_buffer = definition.buffer.read(cx);
5806 assert_eq!(
5807 target_buffer
5808 .file()
5809 .unwrap()
5810 .as_local()
5811 .unwrap()
5812 .abs_path(cx),
5813 Path::new("/dir/a.rs"),
5814 );
5815 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5816 assert_eq!(
5817 list_worktrees(&project, cx),
5818 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5819 );
5820
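            // Drop the only handle to the definition's target buffer. The
            // invisible worktree created for a.rs should then be released.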
5821 drop(definition);
5822 });
5823 cx.read(|cx| {
5824 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5825 });
5826
5827 fn list_worktrees<'a>(
5828 project: &'a ModelHandle<Project>,
5829 cx: &'a AppContext,
5830 ) -> Vec<(&'a Path, bool)> {
5831 project
5832 .read(cx)
5833 .worktrees(cx)
5834 .map(|worktree| {
5835 let worktree = worktree.read(cx);
5836 (
5837 worktree.as_local().unwrap().abs_path().as_ref(),
5838 worktree.is_visible(),
5839 )
5840 })
5841 .collect::<Vec<_>>()
5842 }
5843 }
5844
5845 #[gpui::test]
5846 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5847 let fs = FakeFs::new(cx.background());
5848 fs.insert_tree(
5849 "/dir",
5850 json!({
5851 "file1": "the old contents",
5852 }),
5853 )
5854 .await;
5855
5856 let project = Project::test(fs.clone(), cx);
5857 let worktree_id = project
5858 .update(cx, |p, cx| {
5859 p.find_or_create_local_worktree("/dir", true, cx)
5860 })
5861 .await
5862 .unwrap()
5863 .0
5864 .read_with(cx, |tree, _| tree.id());
5865
5866 let buffer = project
5867 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5868 .await
5869 .unwrap();
5870 buffer
5871 .update(cx, |buffer, cx| {
5872 assert_eq!(buffer.text(), "the old contents");
5873 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5874 buffer.save(cx)
5875 })
5876 .await
5877 .unwrap();
5878
5879 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5880 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5881 }
5882
5883 #[gpui::test]
5884 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5885 let fs = FakeFs::new(cx.background());
5886 fs.insert_tree(
5887 "/dir",
5888 json!({
5889 "file1": "the old contents",
5890 }),
5891 )
5892 .await;
5893
5894 let project = Project::test(fs.clone(), cx);
5895 let worktree_id = project
5896 .update(cx, |p, cx| {
5897 p.find_or_create_local_worktree("/dir/file1", true, cx)
5898 })
5899 .await
5900 .unwrap()
5901 .0
5902 .read_with(cx, |tree, _| tree.id());
5903
5904 let buffer = project
5905 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5906 .await
5907 .unwrap();
5908 buffer
5909 .update(cx, |buffer, cx| {
5910 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5911 buffer.save(cx)
5912 })
5913 .await
5914 .unwrap();
5915
5916 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5917 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5918 }
5919
5920 #[gpui::test]
5921 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5922 let fs = FakeFs::new(cx.background());
5923 fs.insert_tree("/dir", json!({})).await;
5924
5925 let project = Project::test(fs.clone(), cx);
5926 let (worktree, _) = project
5927 .update(cx, |project, cx| {
5928 project.find_or_create_local_worktree("/dir", true, cx)
5929 })
5930 .await
5931 .unwrap();
5932 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5933
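        // Create an untitled buffer and edit it so that it becomes dirty.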
5934 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5935 buffer.update(cx, |buffer, cx| {
5936 buffer.edit([0..0], "abc", cx);
5937 assert!(buffer.is_dirty());
5938 assert!(!buffer.has_conflict());
5939 });
5940 project
5941 .update(cx, |project, cx| {
5942 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5943 })
5944 .await
5945 .unwrap();
5946 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5947 buffer.read_with(cx, |buffer, cx| {
5948 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5949 assert!(!buffer.is_dirty());
5950 assert!(!buffer.has_conflict());
5951 });
5952
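        // Opening the path that the buffer was saved to returns the same buffer.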
5953 let opened_buffer = project
5954 .update(cx, |project, cx| {
5955 project.open_buffer((worktree_id, "file1"), cx)
5956 })
5957 .await
5958 .unwrap();
5959 assert_eq!(opened_buffer, buffer);
5960 }
5961
5962 #[gpui::test(retries = 5)]
5963 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5964 let dir = temp_tree(json!({
5965 "a": {
5966 "file1": "",
5967 "file2": "",
5968 "file3": "",
5969 },
5970 "b": {
5971 "c": {
5972 "file4": "",
5973 "file5": "",
5974 }
5975 }
5976 }));
5977
5978 let project = Project::test(Arc::new(RealFs), cx);
5979 let rpc = project.read_with(cx, |p, _| p.client.clone());
5980
5981 let (tree, _) = project
5982 .update(cx, |p, cx| {
5983 p.find_or_create_local_worktree(dir.path(), true, cx)
5984 })
5985 .await
5986 .unwrap();
5987 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5988
5989 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5990 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5991 async move { buffer.await.unwrap() }
5992 };
5993 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5994 tree.read_with(cx, |tree, _| {
5995 tree.entry_for_path(path)
5996                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5997 .id
5998 })
5999 };
6000
6001 let buffer2 = buffer_for_path("a/file2", cx).await;
6002 let buffer3 = buffer_for_path("a/file3", cx).await;
6003 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6004 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6005
6006 let file2_id = id_for_path("a/file2", &cx);
6007 let file3_id = id_for_path("a/file3", &cx);
6008 let file4_id = id_for_path("b/c/file4", &cx);
6009
6010 // Wait for the initial scan.
6011 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6012 .await;
6013
6014 // Create a remote copy of this worktree.
6015 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6016 let (remote, load_task) = cx.update(|cx| {
6017 Worktree::remote(
6018 1,
6019 1,
6020 initial_snapshot.to_proto(&Default::default(), true),
6021 rpc.clone(),
6022 cx,
6023 )
6024 });
6025 load_task.await;
6026
6027 cx.read(|cx| {
6028 assert!(!buffer2.read(cx).is_dirty());
6029 assert!(!buffer3.read(cx).is_dirty());
6030 assert!(!buffer4.read(cx).is_dirty());
6031 assert!(!buffer5.read(cx).is_dirty());
6032 });
6033
6034 // Rename and delete files and directories.
6035 tree.flush_fs_events(&cx).await;
6036 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6037 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6038 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6039 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6040 tree.flush_fs_events(&cx).await;
6041
6042 let expected_paths = vec![
6043 "a",
6044 "a/file1",
6045 "a/file2.new",
6046 "b",
6047 "d",
6048 "d/file3",
6049 "d/file4",
6050 ];
6051
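        // The worktree reflects the new paths, renamed entries keep their ids,
        // and open buffers follow their files to the new locations.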
6052 cx.read(|app| {
6053 assert_eq!(
6054 tree.read(app)
6055 .paths()
6056 .map(|p| p.to_str().unwrap())
6057 .collect::<Vec<_>>(),
6058 expected_paths
6059 );
6060
6061 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6062 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6063 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6064
6065 assert_eq!(
6066 buffer2.read(app).file().unwrap().path().as_ref(),
6067 Path::new("a/file2.new")
6068 );
6069 assert_eq!(
6070 buffer3.read(app).file().unwrap().path().as_ref(),
6071 Path::new("d/file3")
6072 );
6073 assert_eq!(
6074 buffer4.read(app).file().unwrap().path().as_ref(),
6075 Path::new("d/file4")
6076 );
6077 assert_eq!(
6078 buffer5.read(app).file().unwrap().path().as_ref(),
6079 Path::new("b/c/file5")
6080 );
6081
6082 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6083 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6084 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6085 assert!(buffer5.read(app).file().unwrap().is_deleted());
6086 });
6087
6088 // Update the remote worktree. Check that it becomes consistent with the
6089 // local worktree.
6090 remote.update(cx, |remote, cx| {
6091 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6092 &initial_snapshot,
6093 1,
6094 1,
6095 true,
6096 );
6097 remote
6098 .as_remote_mut()
6099 .unwrap()
6100 .snapshot
6101 .apply_remote_update(update_message)
6102 .unwrap();
6103
6104 assert_eq!(
6105 remote
6106 .paths()
6107 .map(|p| p.to_str().unwrap())
6108 .collect::<Vec<_>>(),
6109 expected_paths
6110 );
6111 });
6112 }
6113
6114 #[gpui::test]
6115 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6116 let fs = FakeFs::new(cx.background());
6117 fs.insert_tree(
6118 "/the-dir",
6119 json!({
6120 "a.txt": "a-contents",
6121 "b.txt": "b-contents",
6122 }),
6123 )
6124 .await;
6125
6126 let project = Project::test(fs.clone(), cx);
6127 let worktree_id = project
6128 .update(cx, |p, cx| {
6129 p.find_or_create_local_worktree("/the-dir", true, cx)
6130 })
6131 .await
6132 .unwrap()
6133 .0
6134 .read_with(cx, |tree, _| tree.id());
6135
6136 // Spawn multiple tasks to open paths, repeating some paths.
6137 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6138 (
6139 p.open_buffer((worktree_id, "a.txt"), cx),
6140 p.open_buffer((worktree_id, "b.txt"), cx),
6141 p.open_buffer((worktree_id, "a.txt"), cx),
6142 )
6143 });
6144
6145 let buffer_a_1 = buffer_a_1.await.unwrap();
6146 let buffer_a_2 = buffer_a_2.await.unwrap();
6147 let buffer_b = buffer_b.await.unwrap();
6148 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6149 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6150
6151 // There is only one buffer per path.
6152 let buffer_a_id = buffer_a_1.id();
6153 assert_eq!(buffer_a_2.id(), buffer_a_id);
6154
6155 // Open the same path again while it is still open.
6156 drop(buffer_a_1);
6157 let buffer_a_3 = project
6158 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6159 .await
6160 .unwrap();
6161
6162 // There's still only one buffer per path.
6163 assert_eq!(buffer_a_3.id(), buffer_a_id);
6164 }
6165
6166 #[gpui::test]
6167 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6168 use std::fs;
6169
6170 let dir = temp_tree(json!({
6171 "file1": "abc",
6172 "file2": "def",
6173 "file3": "ghi",
6174 }));
6175
6176 let project = Project::test(Arc::new(RealFs), cx);
6177 let (worktree, _) = project
6178 .update(cx, |p, cx| {
6179 p.find_or_create_local_worktree(dir.path(), true, cx)
6180 })
6181 .await
6182 .unwrap();
6183 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6184
6185 worktree.flush_fs_events(&cx).await;
6186 worktree
6187 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6188 .await;
6189
6190 let buffer1 = project
6191 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6192 .await
6193 .unwrap();
6194 let events = Rc::new(RefCell::new(Vec::new()));
6195
6196        // Initially, the buffer isn't dirty.
6197 buffer1.update(cx, |buffer, cx| {
6198 cx.subscribe(&buffer1, {
6199 let events = events.clone();
6200 move |_, _, event, _| match event {
6201 BufferEvent::Operation(_) => {}
6202 _ => events.borrow_mut().push(event.clone()),
6203 }
6204 })
6205 .detach();
6206
6207 assert!(!buffer.is_dirty());
6208 assert!(events.borrow().is_empty());
6209
6210 buffer.edit(vec![1..2], "", cx);
6211 });
6212
6213        // After the first edit, the buffer is dirty and emits both an edited and a dirtied event.
6214 buffer1.update(cx, |buffer, cx| {
6215            assert_eq!(buffer.text(), "ac");
6216 assert!(buffer.is_dirty());
6217 assert_eq!(
6218 *events.borrow(),
6219 &[language::Event::Edited, language::Event::Dirtied]
6220 );
6221 events.borrow_mut().clear();
6222 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6223 });
6224
6225        // After saving, the buffer is not dirty and emits a saved event.
6226 buffer1.update(cx, |buffer, cx| {
6227 assert!(!buffer.is_dirty());
6228 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6229 events.borrow_mut().clear();
6230
6231 buffer.edit(vec![1..1], "B", cx);
6232 buffer.edit(vec![2..2], "D", cx);
6233 });
6234
6235        // After editing again, the buffer is dirty and emits further edited and dirtied events.
6236 buffer1.update(cx, |buffer, cx| {
6237            assert_eq!(buffer.text(), "aBDc");
6238 assert!(buffer.is_dirty());
6239 assert_eq!(
6240 *events.borrow(),
6241 &[
6242 language::Event::Edited,
6243 language::Event::Dirtied,
6244 language::Event::Edited,
6245 ],
6246 );
6247 events.borrow_mut().clear();
6248
6249 // TODO - currently, after restoring the buffer to its
6250            // previously-saved state, the buffer is still considered dirty.
6251 buffer.edit([1..3], "", cx);
6252            assert_eq!(buffer.text(), "ac");
6253 assert!(buffer.is_dirty());
6254 });
6255
6256 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6257
6258 // When a file is deleted, the buffer is considered dirty.
6259 let events = Rc::new(RefCell::new(Vec::new()));
6260 let buffer2 = project
6261 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6262 .await
6263 .unwrap();
6264 buffer2.update(cx, |_, cx| {
6265 cx.subscribe(&buffer2, {
6266 let events = events.clone();
6267 move |_, _, event, _| events.borrow_mut().push(event.clone())
6268 })
6269 .detach();
6270 });
6271
6272 fs::remove_file(dir.path().join("file2")).unwrap();
6273 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6274 assert_eq!(
6275 *events.borrow(),
6276 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6277 );
6278
6279        // When a file that is already dirty is deleted, no additional Dirtied event is emitted.
6280 let events = Rc::new(RefCell::new(Vec::new()));
6281 let buffer3 = project
6282 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6283 .await
6284 .unwrap();
6285 buffer3.update(cx, |_, cx| {
6286 cx.subscribe(&buffer3, {
6287 let events = events.clone();
6288 move |_, _, event, _| events.borrow_mut().push(event.clone())
6289 })
6290 .detach();
6291 });
6292
6293 worktree.flush_fs_events(&cx).await;
6294 buffer3.update(cx, |buffer, cx| {
6295 buffer.edit(Some(0..0), "x", cx);
6296 });
6297 events.borrow_mut().clear();
6298 fs::remove_file(dir.path().join("file3")).unwrap();
6299 buffer3
6300 .condition(&cx, |_, _| !events.borrow().is_empty())
6301 .await;
6302 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6303 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6304 }
6305
6306 #[gpui::test]
6307 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6308 use std::fs;
6309
6310 let initial_contents = "aaa\nbbbbb\nc\n";
6311 let dir = temp_tree(json!({ "the-file": initial_contents }));
6312
6313 let project = Project::test(Arc::new(RealFs), cx);
6314 let (worktree, _) = project
6315 .update(cx, |p, cx| {
6316 p.find_or_create_local_worktree(dir.path(), true, cx)
6317 })
6318 .await
6319 .unwrap();
6320 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6321
6322 worktree
6323 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6324 .await;
6325
6326 let abs_path = dir.path().join("the-file");
6327 let buffer = project
6328 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6329 .await
6330 .unwrap();
6331
6332 // TODO
6333 // Add a cursor on each row.
6334 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6335 // assert!(!buffer.is_dirty());
6336 // buffer.add_selection_set(
6337 // &(0..3)
6338 // .map(|row| Selection {
6339 // id: row as usize,
6340 // start: Point::new(row, 1),
6341 // end: Point::new(row, 1),
6342 // reversed: false,
6343 // goal: SelectionGoal::None,
6344 // })
6345 // .collect::<Vec<_>>(),
6346 // cx,
6347 // )
6348 // });
6349
6350 // Change the file on disk, adding two new lines of text, and removing
6351 // one line.
6352 buffer.read_with(cx, |buffer, _| {
6353 assert!(!buffer.is_dirty());
6354 assert!(!buffer.has_conflict());
6355 });
6356 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6357 fs::write(&abs_path, new_contents).unwrap();
6358
6359 // Because the buffer was not modified, it is reloaded from disk. Its
6360 // contents are edited according to the diff between the old and new
6361 // file contents.
6362 buffer
6363 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6364 .await;
6365
6366 buffer.update(cx, |buffer, _| {
6367 assert_eq!(buffer.text(), new_contents);
6368 assert!(!buffer.is_dirty());
6369 assert!(!buffer.has_conflict());
6370
6371 // TODO
6372 // let cursor_positions = buffer
6373 // .selection_set(selection_set_id)
6374 // .unwrap()
6375 // .selections::<Point>(&*buffer)
6376 // .map(|selection| {
6377 // assert_eq!(selection.start, selection.end);
6378 // selection.start
6379 // })
6380 // .collect::<Vec<_>>();
6381 // assert_eq!(
6382 // cursor_positions,
6383 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6384 // );
6385 });
6386
6387 // Modify the buffer
6388 buffer.update(cx, |buffer, cx| {
6389 buffer.edit(vec![0..0], " ", cx);
6390 assert!(buffer.is_dirty());
6391 assert!(!buffer.has_conflict());
6392 });
6393
6394 // Change the file on disk again, adding blank lines to the beginning.
6395 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6396
6397 // Because the buffer is modified, it doesn't reload from disk, but is
6398 // marked as having a conflict.
6399 buffer
6400 .condition(&cx, |buffer, _| buffer.has_conflict())
6401 .await;
6402 }
6403
6404 #[gpui::test]
6405 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6406 cx.foreground().forbid_parking();
6407
6408 let fs = FakeFs::new(cx.background());
6409 fs.insert_tree(
6410 "/the-dir",
6411 json!({
6412 "a.rs": "
6413 fn foo(mut v: Vec<usize>) {
6414 for x in &v {
6415 v.push(1);
6416 }
6417 }
6418 "
6419 .unindent(),
6420 }),
6421 )
6422 .await;
6423
6424 let project = Project::test(fs.clone(), cx);
6425 let (worktree, _) = project
6426 .update(cx, |p, cx| {
6427 p.find_or_create_local_worktree("/the-dir", true, cx)
6428 })
6429 .await
6430 .unwrap();
6431 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6432
6433 let buffer = project
6434 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6435 .await
6436 .unwrap();
6437
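        // Publish two groups of diagnostics: a warning ("error 1") with one
        // related hint, and an error ("error 2") with two related hints.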
6438 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6439 let message = lsp::PublishDiagnosticsParams {
6440 uri: buffer_uri.clone(),
6441 diagnostics: vec![
6442 lsp::Diagnostic {
6443 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6444 severity: Some(DiagnosticSeverity::WARNING),
6445 message: "error 1".to_string(),
6446 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6447 location: lsp::Location {
6448 uri: buffer_uri.clone(),
6449 range: lsp::Range::new(
6450 lsp::Position::new(1, 8),
6451 lsp::Position::new(1, 9),
6452 ),
6453 },
6454 message: "error 1 hint 1".to_string(),
6455 }]),
6456 ..Default::default()
6457 },
6458 lsp::Diagnostic {
6459 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6460 severity: Some(DiagnosticSeverity::HINT),
6461 message: "error 1 hint 1".to_string(),
6462 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6463 location: lsp::Location {
6464 uri: buffer_uri.clone(),
6465 range: lsp::Range::new(
6466 lsp::Position::new(1, 8),
6467 lsp::Position::new(1, 9),
6468 ),
6469 },
6470 message: "original diagnostic".to_string(),
6471 }]),
6472 ..Default::default()
6473 },
6474 lsp::Diagnostic {
6475 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6476 severity: Some(DiagnosticSeverity::ERROR),
6477 message: "error 2".to_string(),
6478 related_information: Some(vec![
6479 lsp::DiagnosticRelatedInformation {
6480 location: lsp::Location {
6481 uri: buffer_uri.clone(),
6482 range: lsp::Range::new(
6483 lsp::Position::new(1, 13),
6484 lsp::Position::new(1, 15),
6485 ),
6486 },
6487 message: "error 2 hint 1".to_string(),
6488 },
6489 lsp::DiagnosticRelatedInformation {
6490 location: lsp::Location {
6491 uri: buffer_uri.clone(),
6492 range: lsp::Range::new(
6493 lsp::Position::new(1, 13),
6494 lsp::Position::new(1, 15),
6495 ),
6496 },
6497 message: "error 2 hint 2".to_string(),
6498 },
6499 ]),
6500 ..Default::default()
6501 },
6502 lsp::Diagnostic {
6503 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6504 severity: Some(DiagnosticSeverity::HINT),
6505 message: "error 2 hint 1".to_string(),
6506 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6507 location: lsp::Location {
6508 uri: buffer_uri.clone(),
6509 range: lsp::Range::new(
6510 lsp::Position::new(2, 8),
6511 lsp::Position::new(2, 17),
6512 ),
6513 },
6514 message: "original diagnostic".to_string(),
6515 }]),
6516 ..Default::default()
6517 },
6518 lsp::Diagnostic {
6519 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6520 severity: Some(DiagnosticSeverity::HINT),
6521 message: "error 2 hint 2".to_string(),
6522 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6523 location: lsp::Location {
6524 uri: buffer_uri.clone(),
6525 range: lsp::Range::new(
6526 lsp::Position::new(2, 8),
6527 lsp::Position::new(2, 17),
6528 ),
6529 },
6530 message: "original diagnostic".to_string(),
6531 }]),
6532 ..Default::default()
6533 },
6534 ],
6535 version: None,
6536 };
6537
6538 project
6539 .update(cx, |p, cx| {
6540 p.update_diagnostics(message, &Default::default(), cx)
6541 })
6542 .unwrap();
6543 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6544
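        // The hints are grouped with their primary diagnostics: "error 1" and
        // its hint share group 0, while "error 2" and its hints share group 1.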
6545 assert_eq!(
6546 buffer
6547 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6548 .collect::<Vec<_>>(),
6549 &[
6550 DiagnosticEntry {
6551 range: Point::new(1, 8)..Point::new(1, 9),
6552 diagnostic: Diagnostic {
6553 severity: DiagnosticSeverity::WARNING,
6554 message: "error 1".to_string(),
6555 group_id: 0,
6556 is_primary: true,
6557 ..Default::default()
6558 }
6559 },
6560 DiagnosticEntry {
6561 range: Point::new(1, 8)..Point::new(1, 9),
6562 diagnostic: Diagnostic {
6563 severity: DiagnosticSeverity::HINT,
6564 message: "error 1 hint 1".to_string(),
6565 group_id: 0,
6566 is_primary: false,
6567 ..Default::default()
6568 }
6569 },
6570 DiagnosticEntry {
6571 range: Point::new(1, 13)..Point::new(1, 15),
6572 diagnostic: Diagnostic {
6573 severity: DiagnosticSeverity::HINT,
6574 message: "error 2 hint 1".to_string(),
6575 group_id: 1,
6576 is_primary: false,
6577 ..Default::default()
6578 }
6579 },
6580 DiagnosticEntry {
6581 range: Point::new(1, 13)..Point::new(1, 15),
6582 diagnostic: Diagnostic {
6583 severity: DiagnosticSeverity::HINT,
6584 message: "error 2 hint 2".to_string(),
6585 group_id: 1,
6586 is_primary: false,
6587 ..Default::default()
6588 }
6589 },
6590 DiagnosticEntry {
6591 range: Point::new(2, 8)..Point::new(2, 17),
6592 diagnostic: Diagnostic {
6593 severity: DiagnosticSeverity::ERROR,
6594 message: "error 2".to_string(),
6595 group_id: 1,
6596 is_primary: true,
6597 ..Default::default()
6598 }
6599 }
6600 ]
6601 );
6602
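        // Each group can also be retrieved individually by its group id.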
6603 assert_eq!(
6604 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6605 &[
6606 DiagnosticEntry {
6607 range: Point::new(1, 8)..Point::new(1, 9),
6608 diagnostic: Diagnostic {
6609 severity: DiagnosticSeverity::WARNING,
6610 message: "error 1".to_string(),
6611 group_id: 0,
6612 is_primary: true,
6613 ..Default::default()
6614 }
6615 },
6616 DiagnosticEntry {
6617 range: Point::new(1, 8)..Point::new(1, 9),
6618 diagnostic: Diagnostic {
6619 severity: DiagnosticSeverity::HINT,
6620 message: "error 1 hint 1".to_string(),
6621 group_id: 0,
6622 is_primary: false,
6623 ..Default::default()
6624 }
6625 },
6626 ]
6627 );
6628 assert_eq!(
6629 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6630 &[
6631 DiagnosticEntry {
6632 range: Point::new(1, 13)..Point::new(1, 15),
6633 diagnostic: Diagnostic {
6634 severity: DiagnosticSeverity::HINT,
6635 message: "error 2 hint 1".to_string(),
6636 group_id: 1,
6637 is_primary: false,
6638 ..Default::default()
6639 }
6640 },
6641 DiagnosticEntry {
6642 range: Point::new(1, 13)..Point::new(1, 15),
6643 diagnostic: Diagnostic {
6644 severity: DiagnosticSeverity::HINT,
6645 message: "error 2 hint 2".to_string(),
6646 group_id: 1,
6647 is_primary: false,
6648 ..Default::default()
6649 }
6650 },
6651 DiagnosticEntry {
6652 range: Point::new(2, 8)..Point::new(2, 17),
6653 diagnostic: Diagnostic {
6654 severity: DiagnosticSeverity::ERROR,
6655 message: "error 2".to_string(),
6656 group_id: 1,
6657 is_primary: true,
6658 ..Default::default()
6659 }
6660 }
6661 ]
6662 );
6663 }
6664
6665 #[gpui::test]
6666 async fn test_rename(cx: &mut gpui::TestAppContext) {
6667 cx.foreground().forbid_parking();
6668
6669 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6670 let language = Arc::new(Language::new(
6671 LanguageConfig {
6672 name: "Rust".into(),
6673 path_suffixes: vec!["rs".to_string()],
6674 language_server: Some(language_server_config),
6675 ..Default::default()
6676 },
6677 Some(tree_sitter_rust::language()),
6678 ));
6679
6680 let fs = FakeFs::new(cx.background());
6681 fs.insert_tree(
6682 "/dir",
6683 json!({
6684 "one.rs": "const ONE: usize = 1;",
6685 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6686 }),
6687 )
6688 .await;
6689
6690 let project = Project::test(fs.clone(), cx);
6691 project.update(cx, |project, _| {
6692 Arc::get_mut(&mut project.languages).unwrap().add(language);
6693 });
6694
6695 let (tree, _) = project
6696 .update(cx, |project, cx| {
6697 project.find_or_create_local_worktree("/dir", true, cx)
6698 })
6699 .await
6700 .unwrap();
6701 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6702 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6703 .await;
6704
6705 let buffer = project
6706 .update(cx, |project, cx| {
6707 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6708 })
6709 .await
6710 .unwrap();
6711
6712 let mut fake_server = fake_servers.next().await.unwrap();
6713
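        // Positioning the cursor within "ONE" and preparing a rename should
        // resolve to the full range of the symbol.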
6714 let response = project.update(cx, |project, cx| {
6715 project.prepare_rename(buffer.clone(), 7, cx)
6716 });
6717 fake_server
6718 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6719 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6720 assert_eq!(params.position, lsp::Position::new(0, 7));
6721 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6722 lsp::Position::new(0, 6),
6723 lsp::Position::new(0, 9),
6724 )))
6725 })
6726 .next()
6727 .await
6728 .unwrap();
6729 let range = response.await.unwrap().unwrap();
6730 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6731 assert_eq!(range, 6..9);
6732
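        // Renaming "ONE" to "THREE" applies the language server's workspace
        // edit to both one.rs and two.rs, opening two.rs in the process.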
6733 let response = project.update(cx, |project, cx| {
6734 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6735 });
6736 fake_server
6737 .handle_request::<lsp::request::Rename, _>(|params, _| {
6738 assert_eq!(
6739 params.text_document_position.text_document.uri.as_str(),
6740 "file:///dir/one.rs"
6741 );
6742 assert_eq!(
6743 params.text_document_position.position,
6744 lsp::Position::new(0, 7)
6745 );
6746 assert_eq!(params.new_name, "THREE");
6747 Some(lsp::WorkspaceEdit {
6748 changes: Some(
6749 [
6750 (
6751 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6752 vec![lsp::TextEdit::new(
6753 lsp::Range::new(
6754 lsp::Position::new(0, 6),
6755 lsp::Position::new(0, 9),
6756 ),
6757 "THREE".to_string(),
6758 )],
6759 ),
6760 (
6761 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6762 vec![
6763 lsp::TextEdit::new(
6764 lsp::Range::new(
6765 lsp::Position::new(0, 24),
6766 lsp::Position::new(0, 27),
6767 ),
6768 "THREE".to_string(),
6769 ),
6770 lsp::TextEdit::new(
6771 lsp::Range::new(
6772 lsp::Position::new(0, 35),
6773 lsp::Position::new(0, 38),
6774 ),
6775 "THREE".to_string(),
6776 ),
6777 ],
6778 ),
6779 ]
6780 .into_iter()
6781 .collect(),
6782 ),
6783 ..Default::default()
6784 })
6785 })
6786 .next()
6787 .await
6788 .unwrap();
6789 let mut transaction = response.await.unwrap().0;
6790 assert_eq!(transaction.len(), 2);
6791 assert_eq!(
6792 transaction
6793 .remove_entry(&buffer)
6794 .unwrap()
6795 .0
6796 .read_with(cx, |buffer, _| buffer.text()),
6797 "const THREE: usize = 1;"
6798 );
6799 assert_eq!(
6800 transaction
6801 .into_keys()
6802 .next()
6803 .unwrap()
6804 .read_with(cx, |buffer, _| buffer.text()),
6805 "const TWO: usize = one::THREE + one::THREE;"
6806 );
6807 }
6808
6809 #[gpui::test]
6810 async fn test_search(cx: &mut gpui::TestAppContext) {
6811 let fs = FakeFs::new(cx.background());
6812 fs.insert_tree(
6813 "/dir",
6814 json!({
6815 "one.rs": "const ONE: usize = 1;",
6816 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6817 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6818 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6819 }),
6820 )
6821 .await;
6822 let project = Project::test(fs.clone(), cx);
6823 let (tree, _) = project
6824 .update(cx, |project, cx| {
6825 project.find_or_create_local_worktree("/dir", true, cx)
6826 })
6827 .await
6828 .unwrap();
6829 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6830 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6831 .await;
6832
6833 assert_eq!(
6834 search(&project, SearchQuery::text("TWO", false, true), cx)
6835 .await
6836 .unwrap(),
6837 HashMap::from_iter([
6838 ("two.rs".to_string(), vec![6..9]),
6839 ("three.rs".to_string(), vec![37..40])
6840 ])
6841 );
6842
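        // Modify an open buffer so that it matches the query. The search now
        // reflects the buffer's unsaved contents rather than the file on disk.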
6843 let buffer_4 = project
6844 .update(cx, |project, cx| {
6845 project.open_buffer((worktree_id, "four.rs"), cx)
6846 })
6847 .await
6848 .unwrap();
6849 buffer_4.update(cx, |buffer, cx| {
6850 buffer.edit([20..28, 31..43], "two::TWO", cx);
6851 });
6852
6853 assert_eq!(
6854 search(&project, SearchQuery::text("TWO", false, true), cx)
6855 .await
6856 .unwrap(),
6857 HashMap::from_iter([
6858 ("two.rs".to_string(), vec![6..9]),
6859 ("three.rs".to_string(), vec![37..40]),
6860 ("four.rs".to_string(), vec![25..28, 36..39])
6861 ])
6862 );
6863
6864 async fn search(
6865 project: &ModelHandle<Project>,
6866 query: SearchQuery,
6867 cx: &mut gpui::TestAppContext,
6868 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6869 let results = project
6870 .update(cx, |project, cx| project.search(query, cx))
6871 .await?;
6872
6873 Ok(results
6874 .into_iter()
6875 .map(|(buffer, ranges)| {
6876 buffer.read_with(cx, |buffer, _| {
6877 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6878 let ranges = ranges
6879 .into_iter()
6880 .map(|range| range.to_offset(buffer))
6881 .collect::<Vec<_>>();
6882 (path, ranges)
6883 })
6884 })
6885 .collect())
6886 }
6887 }
6888}