1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
52pub trait Item: Entity {
53 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
54}
55
56pub struct Project {
57 worktrees: Vec<WorktreeHandle>,
58 active_entry: Option<ProjectEntryId>,
59 languages: Arc<LanguageRegistry>,
60 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
61 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
62 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
63 language_server_settings: Arc<Mutex<serde_json::Value>>,
64 next_language_server_id: usize,
65 client: Arc<client::Client>,
66 next_entry_id: Arc<AtomicUsize>,
67 user_store: ModelHandle<UserStore>,
68 fs: Arc<dyn Fs>,
69 client_state: ProjectClientState,
70 collaborators: HashMap<PeerId, Collaborator>,
71 subscriptions: Vec<client::Subscription>,
72 language_servers_with_diagnostics_running: isize,
73 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
74 shared_buffers: HashMap<PeerId, HashSet<u64>>,
75 loading_buffers: HashMap<
76 ProjectPath,
77 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
78 >,
79 loading_local_worktrees:
80 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
81 opened_buffers: HashMap<u64, OpenBuffer>,
82 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
83 nonce: u128,
84}
85
86enum OpenBuffer {
87 Strong(ModelHandle<Buffer>),
88 Weak(WeakModelHandle<Buffer>),
89 Loading(Vec<Operation>),
90}
91
92enum WorktreeHandle {
93 Strong(ModelHandle<Worktree>),
94 Weak(WeakModelHandle<Worktree>),
95}
96
97enum ProjectClientState {
98 Local {
99 is_shared: bool,
100 remote_id_tx: watch::Sender<Option<u64>>,
101 remote_id_rx: watch::Receiver<Option<u64>>,
102 _maintain_remote_id_task: Task<Option<()>>,
103 },
104 Remote {
105 sharing_has_stopped: bool,
106 remote_id: u64,
107 replica_id: ReplicaId,
108 _detect_unshare_task: Task<Option<()>>,
109 },
110}
111
112#[derive(Clone, Debug)]
113pub struct Collaborator {
114 pub user: Arc<User>,
115 pub peer_id: PeerId,
116 pub replica_id: ReplicaId,
117}
118
119#[derive(Clone, Debug, PartialEq)]
120pub enum Event {
121 ActiveEntryChanged(Option<ProjectEntryId>),
122 WorktreeRemoved(WorktreeId),
123 DiskBasedDiagnosticsStarted,
124 DiskBasedDiagnosticsUpdated,
125 DiskBasedDiagnosticsFinished,
126 DiagnosticsUpdated(ProjectPath),
127}
128
129enum LanguageServerEvent {
130 WorkStart {
131 token: String,
132 },
133 WorkProgress {
134 token: String,
135 progress: LanguageServerProgress,
136 },
137 WorkEnd {
138 token: String,
139 },
140 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
141}
142
143pub struct LanguageServerStatus {
144 pub name: String,
145 pub pending_work: BTreeMap<String, LanguageServerProgress>,
146 pending_diagnostic_updates: isize,
147}
148
149#[derive(Clone, Debug)]
150pub struct LanguageServerProgress {
151 pub message: Option<String>,
152 pub percentage: Option<usize>,
153 pub last_update_at: Instant,
154}
155
156#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
157pub struct ProjectPath {
158 pub worktree_id: WorktreeId,
159 pub path: Arc<Path>,
160}
161
162#[derive(Clone, Debug, Default, PartialEq)]
163pub struct DiagnosticSummary {
164 pub error_count: usize,
165 pub warning_count: usize,
166 pub info_count: usize,
167 pub hint_count: usize,
168}
169
170#[derive(Debug)]
171pub struct Location {
172 pub buffer: ModelHandle<Buffer>,
173 pub range: Range<language::Anchor>,
174}
175
176#[derive(Debug)]
177pub struct DocumentHighlight {
178 pub range: Range<language::Anchor>,
179 pub kind: DocumentHighlightKind,
180}
181
182#[derive(Clone, Debug)]
183pub struct Symbol {
184 pub source_worktree_id: WorktreeId,
185 pub worktree_id: WorktreeId,
186 pub language_name: String,
187 pub path: PathBuf,
188 pub label: CodeLabel,
189 pub name: String,
190 pub kind: lsp::SymbolKind,
191 pub range: Range<PointUtf16>,
192 pub signature: [u8; 32],
193}
194
195#[derive(Default)]
196pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
197
198impl DiagnosticSummary {
199 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
200 let mut this = Self {
201 error_count: 0,
202 warning_count: 0,
203 info_count: 0,
204 hint_count: 0,
205 };
206
207 for entry in diagnostics {
208 if entry.diagnostic.is_primary {
209 match entry.diagnostic.severity {
210 DiagnosticSeverity::ERROR => this.error_count += 1,
211 DiagnosticSeverity::WARNING => this.warning_count += 1,
212 DiagnosticSeverity::INFORMATION => this.info_count += 1,
213 DiagnosticSeverity::HINT => this.hint_count += 1,
214 _ => {}
215 }
216 }
217 }
218
219 this
220 }
221
222 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
223 proto::DiagnosticSummary {
224 path: path.to_string_lossy().to_string(),
225 error_count: self.error_count as u32,
226 warning_count: self.warning_count as u32,
227 info_count: self.info_count as u32,
228 hint_count: self.hint_count as u32,
229 }
230 }
231}
232
233#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
234pub struct ProjectEntryId(usize);
235
236impl ProjectEntryId {
237 pub fn new(counter: &AtomicUsize) -> Self {
238 Self(counter.fetch_add(1, SeqCst))
239 }
240
241 pub fn from_proto(id: u64) -> Self {
242 Self(id as usize)
243 }
244
245 pub fn to_proto(&self) -> u64 {
246 self.0 as u64
247 }
248
249 pub fn to_usize(&self) -> usize {
250 self.0
251 }
252}
253
254impl Project {
255 pub fn init(client: &Arc<Client>) {
256 client.add_entity_message_handler(Self::handle_add_collaborator);
257 client.add_entity_message_handler(Self::handle_buffer_reloaded);
258 client.add_entity_message_handler(Self::handle_buffer_saved);
259 client.add_entity_message_handler(Self::handle_start_language_server);
260 client.add_entity_message_handler(Self::handle_update_language_server);
261 client.add_entity_message_handler(Self::handle_remove_collaborator);
262 client.add_entity_message_handler(Self::handle_register_worktree);
263 client.add_entity_message_handler(Self::handle_unregister_worktree);
264 client.add_entity_message_handler(Self::handle_unshare_project);
265 client.add_entity_message_handler(Self::handle_update_buffer_file);
266 client.add_entity_message_handler(Self::handle_update_buffer);
267 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
268 client.add_entity_message_handler(Self::handle_update_worktree);
269 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
270 client.add_entity_request_handler(Self::handle_apply_code_action);
271 client.add_entity_request_handler(Self::handle_format_buffers);
272 client.add_entity_request_handler(Self::handle_get_code_actions);
273 client.add_entity_request_handler(Self::handle_get_completions);
274 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
275 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
276 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
277 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
278 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
279 client.add_entity_request_handler(Self::handle_search_project);
280 client.add_entity_request_handler(Self::handle_get_project_symbols);
281 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
282 client.add_entity_request_handler(Self::handle_open_buffer);
283 client.add_entity_request_handler(Self::handle_save_buffer);
284 }
285
286 pub fn local(
287 client: Arc<Client>,
288 user_store: ModelHandle<UserStore>,
289 languages: Arc<LanguageRegistry>,
290 fs: Arc<dyn Fs>,
291 cx: &mut MutableAppContext,
292 ) -> ModelHandle<Self> {
293 cx.add_model(|cx: &mut ModelContext<Self>| {
294 let (remote_id_tx, remote_id_rx) = watch::channel();
295 let _maintain_remote_id_task = cx.spawn_weak({
296 let rpc = client.clone();
297 move |this, mut cx| {
298 async move {
299 let mut status = rpc.status();
300 while let Some(status) = status.next().await {
301 if let Some(this) = this.upgrade(&cx) {
302 let remote_id = if status.is_connected() {
303 let response = rpc.request(proto::RegisterProject {}).await?;
304 Some(response.project_id)
305 } else {
306 None
307 };
308
309 if let Some(project_id) = remote_id {
310 let mut registrations = Vec::new();
311 this.update(&mut cx, |this, cx| {
312 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
313 registrations.push(worktree.update(
314 cx,
315 |worktree, cx| {
316 let worktree = worktree.as_local_mut().unwrap();
317 worktree.register(project_id, cx)
318 },
319 ));
320 }
321 });
322 for registration in registrations {
323 registration.await?;
324 }
325 }
326 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
327 }
328 }
329 Ok(())
330 }
331 .log_err()
332 }
333 });
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 Self {
337 worktrees: Default::default(),
338 collaborators: Default::default(),
339 opened_buffers: Default::default(),
340 shared_buffers: Default::default(),
341 loading_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 buffer_snapshots: Default::default(),
344 client_state: ProjectClientState::Local {
345 is_shared: false,
346 remote_id_tx,
347 remote_id_rx,
348 _maintain_remote_id_task,
349 },
350 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
351 subscriptions: Vec::new(),
352 active_entry: None,
353 languages,
354 client,
355 user_store,
356 fs,
357 next_entry_id: Default::default(),
358 language_servers_with_diagnostics_running: 0,
359 language_servers: Default::default(),
360 started_language_servers: Default::default(),
361 language_server_statuses: Default::default(),
362 language_server_settings: Default::default(),
363 next_language_server_id: 0,
364 nonce: StdRng::from_entropy().gen(),
365 }
366 })
367 }
368
369 pub async fn remote(
370 remote_id: u64,
371 client: Arc<Client>,
372 user_store: ModelHandle<UserStore>,
373 languages: Arc<LanguageRegistry>,
374 fs: Arc<dyn Fs>,
375 cx: &mut AsyncAppContext,
376 ) -> Result<ModelHandle<Self>> {
377 client.authenticate_and_connect(&cx).await?;
378
379 let response = client
380 .request(proto::JoinProject {
381 project_id: remote_id,
382 })
383 .await?;
384
385 let replica_id = response.replica_id as ReplicaId;
386
387 let mut worktrees = Vec::new();
388 for worktree in response.worktrees {
389 let (worktree, load_task) = cx
390 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
391 worktrees.push(worktree);
392 load_task.detach();
393 }
394
395 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
396 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
397 let mut this = Self {
398 worktrees: Vec::new(),
399 loading_buffers: Default::default(),
400 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
401 shared_buffers: Default::default(),
402 loading_local_worktrees: Default::default(),
403 active_entry: None,
404 collaborators: Default::default(),
405 languages,
406 user_store: user_store.clone(),
407 fs,
408 next_entry_id: Default::default(),
409 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
410 client: client.clone(),
411 client_state: ProjectClientState::Remote {
412 sharing_has_stopped: false,
413 remote_id,
414 replica_id,
415 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
416 async move {
417 let mut status = client.status();
418 let is_connected =
419 status.next().await.map_or(false, |s| s.is_connected());
420 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
421 if !is_connected || status.next().await.is_some() {
422 if let Some(this) = this.upgrade(&cx) {
423 this.update(&mut cx, |this, cx| this.project_unshared(cx))
424 }
425 }
426 Ok(())
427 }
428 .log_err()
429 }),
430 },
431 language_servers_with_diagnostics_running: 0,
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_settings: Default::default(),
435 language_server_statuses: response
436 .language_servers
437 .into_iter()
438 .map(|server| {
439 (
440 server.id as usize,
441 LanguageServerStatus {
442 name: server.name,
443 pending_work: Default::default(),
444 pending_diagnostic_updates: 0,
445 },
446 )
447 })
448 .collect(),
449 next_language_server_id: 0,
450 opened_buffers: Default::default(),
451 buffer_snapshots: Default::default(),
452 nonce: StdRng::from_entropy().gen(),
453 };
454 for worktree in worktrees {
455 this.add_worktree(&worktree, cx);
456 }
457 this
458 });
459
460 let user_ids = response
461 .collaborators
462 .iter()
463 .map(|peer| peer.user_id)
464 .collect();
465 user_store
466 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
467 .await?;
468 let mut collaborators = HashMap::default();
469 for message in response.collaborators {
470 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
471 collaborators.insert(collaborator.peer_id, collaborator);
472 }
473
474 this.update(cx, |this, _| {
475 this.collaborators = collaborators;
476 });
477
478 Ok(this)
479 }
480
481 #[cfg(any(test, feature = "test-support"))]
482 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
483 let languages = Arc::new(LanguageRegistry::test());
484 let http_client = client::test::FakeHttpClient::with_404_response();
485 let client = client::Client::new(http_client.clone());
486 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
487 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
488 }
489
490 #[cfg(any(test, feature = "test-support"))]
491 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
492 self.opened_buffers
493 .get(&remote_id)
494 .and_then(|buffer| buffer.upgrade(cx))
495 }
496
497 #[cfg(any(test, feature = "test-support"))]
498 pub fn languages(&self) -> &Arc<LanguageRegistry> {
499 &self.languages
500 }
501
502 #[cfg(any(test, feature = "test-support"))]
503 pub fn check_invariants(&self, cx: &AppContext) {
504 if self.is_local() {
505 let mut worktree_root_paths = HashMap::default();
506 for worktree in self.worktrees(cx) {
507 let worktree = worktree.read(cx);
508 let abs_path = worktree.as_local().unwrap().abs_path().clone();
509 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
510 assert_eq!(
511 prev_worktree_id,
512 None,
513 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
514 abs_path,
515 worktree.id(),
516 prev_worktree_id
517 )
518 }
519 } else {
520 let replica_id = self.replica_id();
521 for buffer in self.opened_buffers.values() {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 let buffer = buffer.read(cx);
524 assert_eq!(
525 buffer.deferred_ops_len(),
526 0,
527 "replica {}, buffer {} has deferred operations",
528 replica_id,
529 buffer.remote_id()
530 );
531 }
532 }
533 }
534 }
535
536 #[cfg(any(test, feature = "test-support"))]
537 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
538 let path = path.into();
539 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
540 self.opened_buffers.iter().any(|(_, buffer)| {
541 if let Some(buffer) = buffer.upgrade(cx) {
542 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
543 if file.worktree == worktree && file.path() == &path.path {
544 return true;
545 }
546 }
547 }
548 false
549 })
550 } else {
551 false
552 }
553 }
554
555 pub fn fs(&self) -> &Arc<dyn Fs> {
556 &self.fs
557 }
558
559 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
561 *remote_id_tx.borrow_mut() = remote_id;
562 }
563
564 self.subscriptions.clear();
565 if let Some(remote_id) = remote_id {
566 self.subscriptions
567 .push(self.client.add_model_for_remote_entity(remote_id, cx));
568 }
569 }
570
571 pub fn remote_id(&self) -> Option<u64> {
572 match &self.client_state {
573 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
574 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
575 }
576 }
577
578 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
579 let mut id = None;
580 let mut watch = None;
581 match &self.client_state {
582 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
583 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
584 }
585
586 async move {
587 if let Some(id) = id {
588 return id;
589 }
590 let mut watch = watch.unwrap();
591 loop {
592 let id = *watch.borrow();
593 if let Some(id) = id {
594 return id;
595 }
596 watch.next().await;
597 }
598 }
599 }
600
601 pub fn replica_id(&self) -> ReplicaId {
602 match &self.client_state {
603 ProjectClientState::Local { .. } => 0,
604 ProjectClientState::Remote { replica_id, .. } => *replica_id,
605 }
606 }
607
608 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
609 &self.collaborators
610 }
611
612 pub fn worktrees<'a>(
613 &'a self,
614 cx: &'a AppContext,
615 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
616 self.worktrees
617 .iter()
618 .filter_map(move |worktree| worktree.upgrade(cx))
619 }
620
621 pub fn visible_worktrees<'a>(
622 &'a self,
623 cx: &'a AppContext,
624 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
625 self.worktrees.iter().filter_map(|worktree| {
626 worktree.upgrade(cx).and_then(|worktree| {
627 if worktree.read(cx).is_visible() {
628 Some(worktree)
629 } else {
630 None
631 }
632 })
633 })
634 }
635
636 pub fn worktree_for_id(
637 &self,
638 id: WorktreeId,
639 cx: &AppContext,
640 ) -> Option<ModelHandle<Worktree>> {
641 self.worktrees(cx)
642 .find(|worktree| worktree.read(cx).id() == id)
643 }
644
645 pub fn worktree_for_entry(
646 &self,
647 entry_id: ProjectEntryId,
648 cx: &AppContext,
649 ) -> Option<ModelHandle<Worktree>> {
650 self.worktrees(cx)
651 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
652 }
653
654 pub fn worktree_id_for_entry(
655 &self,
656 entry_id: ProjectEntryId,
657 cx: &AppContext,
658 ) -> Option<WorktreeId> {
659 self.worktree_for_entry(entry_id, cx)
660 .map(|worktree| worktree.read(cx).id())
661 }
662
663 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
664 let rpc = self.client.clone();
665 cx.spawn(|this, mut cx| async move {
666 let project_id = this.update(&mut cx, |this, cx| {
667 if let ProjectClientState::Local {
668 is_shared,
669 remote_id_rx,
670 ..
671 } = &mut this.client_state
672 {
673 *is_shared = true;
674
675 for open_buffer in this.opened_buffers.values_mut() {
676 match open_buffer {
677 OpenBuffer::Strong(_) => {}
678 OpenBuffer::Weak(buffer) => {
679 if let Some(buffer) = buffer.upgrade(cx) {
680 *open_buffer = OpenBuffer::Strong(buffer);
681 }
682 }
683 OpenBuffer::Loading(_) => unreachable!(),
684 }
685 }
686
687 for worktree_handle in this.worktrees.iter_mut() {
688 match worktree_handle {
689 WorktreeHandle::Strong(_) => {}
690 WorktreeHandle::Weak(worktree) => {
691 if let Some(worktree) = worktree.upgrade(cx) {
692 *worktree_handle = WorktreeHandle::Strong(worktree);
693 }
694 }
695 }
696 }
697
698 remote_id_rx
699 .borrow()
700 .ok_or_else(|| anyhow!("no project id"))
701 } else {
702 Err(anyhow!("can't share a remote project"))
703 }
704 })?;
705
706 rpc.request(proto::ShareProject { project_id }).await?;
707
708 let mut tasks = Vec::new();
709 this.update(&mut cx, |this, cx| {
710 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
711 worktree.update(cx, |worktree, cx| {
712 let worktree = worktree.as_local_mut().unwrap();
713 tasks.push(worktree.share(project_id, cx));
714 });
715 }
716 });
717 for task in tasks {
718 task.await?;
719 }
720 this.update(&mut cx, |_, cx| cx.notify());
721 Ok(())
722 })
723 }
724
725 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
726 let rpc = self.client.clone();
727 cx.spawn(|this, mut cx| async move {
728 let project_id = this.update(&mut cx, |this, cx| {
729 if let ProjectClientState::Local {
730 is_shared,
731 remote_id_rx,
732 ..
733 } = &mut this.client_state
734 {
735 *is_shared = false;
736
737 for open_buffer in this.opened_buffers.values_mut() {
738 match open_buffer {
739 OpenBuffer::Strong(buffer) => {
740 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
741 }
742 _ => {}
743 }
744 }
745
746 for worktree_handle in this.worktrees.iter_mut() {
747 match worktree_handle {
748 WorktreeHandle::Strong(worktree) => {
749 if !worktree.read(cx).is_visible() {
750 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
751 }
752 }
753 _ => {}
754 }
755 }
756
757 remote_id_rx
758 .borrow()
759 .ok_or_else(|| anyhow!("no project id"))
760 } else {
761 Err(anyhow!("can't share a remote project"))
762 }
763 })?;
764
765 rpc.send(proto::UnshareProject { project_id })?;
766 this.update(&mut cx, |this, cx| {
767 this.collaborators.clear();
768 this.shared_buffers.clear();
769 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
770 worktree.update(cx, |worktree, _| {
771 worktree.as_local_mut().unwrap().unshare();
772 });
773 }
774 cx.notify()
775 });
776 Ok(())
777 })
778 }
779
780 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
781 if let ProjectClientState::Remote {
782 sharing_has_stopped,
783 ..
784 } = &mut self.client_state
785 {
786 *sharing_has_stopped = true;
787 self.collaborators.clear();
788 cx.notify();
789 }
790 }
791
792 pub fn is_read_only(&self) -> bool {
793 match &self.client_state {
794 ProjectClientState::Local { .. } => false,
795 ProjectClientState::Remote {
796 sharing_has_stopped,
797 ..
798 } => *sharing_has_stopped,
799 }
800 }
801
802 pub fn is_local(&self) -> bool {
803 match &self.client_state {
804 ProjectClientState::Local { .. } => true,
805 ProjectClientState::Remote { .. } => false,
806 }
807 }
808
809 pub fn is_remote(&self) -> bool {
810 !self.is_local()
811 }
812
813 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
814 if self.is_remote() {
815 return Err(anyhow!("creating buffers as a guest is not supported yet"));
816 }
817
818 let buffer = cx.add_model(|cx| {
819 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
820 });
821 self.register_buffer(&buffer, cx)?;
822 Ok(buffer)
823 }
824
825 pub fn open_buffer(
826 &mut self,
827 path: impl Into<ProjectPath>,
828 cx: &mut ModelContext<Self>,
829 ) -> Task<Result<ModelHandle<Buffer>>> {
830 let project_path = path.into();
831 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
832 worktree
833 } else {
834 return Task::ready(Err(anyhow!("no such worktree")));
835 };
836
837 // If there is already a buffer for the given path, then return it.
838 let existing_buffer = self.get_open_buffer(&project_path, cx);
839 if let Some(existing_buffer) = existing_buffer {
840 return Task::ready(Ok(existing_buffer));
841 }
842
843 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
844 // If the given path is already being loaded, then wait for that existing
845 // task to complete and return the same buffer.
846 hash_map::Entry::Occupied(e) => e.get().clone(),
847
848 // Otherwise, record the fact that this path is now being loaded.
849 hash_map::Entry::Vacant(entry) => {
850 let (mut tx, rx) = postage::watch::channel();
851 entry.insert(rx.clone());
852
853 let load_buffer = if worktree.read(cx).is_local() {
854 self.open_local_buffer(&project_path.path, &worktree, cx)
855 } else {
856 self.open_remote_buffer(&project_path.path, &worktree, cx)
857 };
858
859 cx.spawn(move |this, mut cx| async move {
860 let load_result = load_buffer.await;
861 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
862 // Record the fact that the buffer is no longer loading.
863 this.loading_buffers.remove(&project_path);
864 let buffer = load_result.map_err(Arc::new)?;
865 Ok(buffer)
866 }));
867 })
868 .detach();
869 rx
870 }
871 };
872
873 cx.foreground().spawn(async move {
874 loop {
875 if let Some(result) = loading_watch.borrow().as_ref() {
876 match result {
877 Ok(buffer) => return Ok(buffer.clone()),
878 Err(error) => return Err(anyhow!("{}", error)),
879 }
880 }
881 loading_watch.next().await;
882 }
883 })
884 }
885
886 fn open_local_buffer(
887 &mut self,
888 path: &Arc<Path>,
889 worktree: &ModelHandle<Worktree>,
890 cx: &mut ModelContext<Self>,
891 ) -> Task<Result<ModelHandle<Buffer>>> {
892 let load_buffer = worktree.update(cx, |worktree, cx| {
893 let worktree = worktree.as_local_mut().unwrap();
894 worktree.load_buffer(path, cx)
895 });
896 cx.spawn(|this, mut cx| async move {
897 let buffer = load_buffer.await?;
898 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
899 Ok(buffer)
900 })
901 }
902
903 fn open_remote_buffer(
904 &mut self,
905 path: &Arc<Path>,
906 worktree: &ModelHandle<Worktree>,
907 cx: &mut ModelContext<Self>,
908 ) -> Task<Result<ModelHandle<Buffer>>> {
909 let rpc = self.client.clone();
910 let project_id = self.remote_id().unwrap();
911 let remote_worktree_id = worktree.read(cx).id();
912 let path = path.clone();
913 let path_string = path.to_string_lossy().to_string();
914 cx.spawn(|this, mut cx| async move {
915 let response = rpc
916 .request(proto::OpenBuffer {
917 project_id,
918 worktree_id: remote_worktree_id.to_proto(),
919 path: path_string,
920 })
921 .await?;
922 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
923 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
924 .await
925 })
926 }
927
928 fn open_local_buffer_via_lsp(
929 &mut self,
930 abs_path: lsp::Url,
931 lang_name: Arc<str>,
932 lang_server: Arc<LanguageServer>,
933 cx: &mut ModelContext<Self>,
934 ) -> Task<Result<ModelHandle<Buffer>>> {
935 cx.spawn(|this, mut cx| async move {
936 let abs_path = abs_path
937 .to_file_path()
938 .map_err(|_| anyhow!("can't convert URI to path"))?;
939 let (worktree, relative_path) = if let Some(result) =
940 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
941 {
942 result
943 } else {
944 let worktree = this
945 .update(&mut cx, |this, cx| {
946 this.create_local_worktree(&abs_path, false, cx)
947 })
948 .await?;
949 this.update(&mut cx, |this, cx| {
950 this.language_servers
951 .insert((worktree.read(cx).id(), lang_name), lang_server);
952 });
953 (worktree, PathBuf::new())
954 };
955
956 let project_path = ProjectPath {
957 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
958 path: relative_path.into(),
959 };
960 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
961 .await
962 })
963 }
964
965 pub fn save_buffer_as(
966 &mut self,
967 buffer: ModelHandle<Buffer>,
968 abs_path: PathBuf,
969 cx: &mut ModelContext<Project>,
970 ) -> Task<Result<()>> {
971 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
972 cx.spawn(|this, mut cx| async move {
973 let (worktree, path) = worktree_task.await?;
974 worktree
975 .update(&mut cx, |worktree, cx| {
976 worktree
977 .as_local_mut()
978 .unwrap()
979 .save_buffer_as(buffer.clone(), path, cx)
980 })
981 .await?;
982 this.update(&mut cx, |this, cx| {
983 this.assign_language_to_buffer(&buffer, cx);
984 this.register_buffer_with_language_server(&buffer, cx);
985 });
986 Ok(())
987 })
988 }
989
990 pub fn get_open_buffer(
991 &mut self,
992 path: &ProjectPath,
993 cx: &mut ModelContext<Self>,
994 ) -> Option<ModelHandle<Buffer>> {
995 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
996 self.opened_buffers.values().find_map(|buffer| {
997 let buffer = buffer.upgrade(cx)?;
998 let file = File::from_dyn(buffer.read(cx).file())?;
999 if file.worktree == worktree && file.path() == &path.path {
1000 Some(buffer)
1001 } else {
1002 None
1003 }
1004 })
1005 }
1006
1007 fn register_buffer(
1008 &mut self,
1009 buffer: &ModelHandle<Buffer>,
1010 cx: &mut ModelContext<Self>,
1011 ) -> Result<()> {
1012 let remote_id = buffer.read(cx).remote_id();
1013 let open_buffer = if self.is_remote() || self.is_shared() {
1014 OpenBuffer::Strong(buffer.clone())
1015 } else {
1016 OpenBuffer::Weak(buffer.downgrade())
1017 };
1018
1019 match self.opened_buffers.insert(remote_id, open_buffer) {
1020 None => {}
1021 Some(OpenBuffer::Loading(operations)) => {
1022 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1023 }
1024 Some(OpenBuffer::Weak(existing_handle)) => {
1025 if existing_handle.upgrade(cx).is_some() {
1026 Err(anyhow!(
1027 "already registered buffer with remote id {}",
1028 remote_id
1029 ))?
1030 }
1031 }
1032 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1033 "already registered buffer with remote id {}",
1034 remote_id
1035 ))?,
1036 }
1037 cx.subscribe(buffer, |this, buffer, event, cx| {
1038 this.on_buffer_event(buffer, event, cx);
1039 })
1040 .detach();
1041
1042 self.assign_language_to_buffer(buffer, cx);
1043 self.register_buffer_with_language_server(buffer, cx);
1044
1045 Ok(())
1046 }
1047
1048 fn register_buffer_with_language_server(
1049 &mut self,
1050 buffer_handle: &ModelHandle<Buffer>,
1051 cx: &mut ModelContext<Self>,
1052 ) {
1053 let buffer = buffer_handle.read(cx);
1054 let buffer_id = buffer.remote_id();
1055 if let Some(file) = File::from_dyn(buffer.file()) {
1056 if file.is_local() {
1057 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1058 let initial_snapshot = buffer.text_snapshot();
1059 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1060
1061 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1062 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1063 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1064 .log_err();
1065 }
1066 }
1067
1068 if let Some(server) = language_server {
1069 server
1070 .notify::<lsp::notification::DidOpenTextDocument>(
1071 lsp::DidOpenTextDocumentParams {
1072 text_document: lsp::TextDocumentItem::new(
1073 uri,
1074 Default::default(),
1075 0,
1076 initial_snapshot.text(),
1077 ),
1078 }
1079 .clone(),
1080 )
1081 .log_err();
1082 buffer_handle.update(cx, |buffer, cx| {
1083 buffer.set_completion_triggers(
1084 server
1085 .capabilities()
1086 .completion_provider
1087 .as_ref()
1088 .and_then(|provider| provider.trigger_characters.clone())
1089 .unwrap_or(Vec::new()),
1090 cx,
1091 )
1092 });
1093 self.buffer_snapshots
1094 .insert(buffer_id, vec![(0, initial_snapshot)]);
1095 }
1096
1097 cx.observe_release(buffer_handle, |this, buffer, cx| {
1098 if let Some(file) = File::from_dyn(buffer.file()) {
1099 if file.is_local() {
1100 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1101 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1102 server
1103 .notify::<lsp::notification::DidCloseTextDocument>(
1104 lsp::DidCloseTextDocumentParams {
1105 text_document: lsp::TextDocumentIdentifier::new(
1106 uri.clone(),
1107 ),
1108 },
1109 )
1110 .log_err();
1111 }
1112 }
1113 }
1114 })
1115 .detach();
1116 }
1117 }
1118 }
1119
1120 fn on_buffer_event(
1121 &mut self,
1122 buffer: ModelHandle<Buffer>,
1123 event: &BufferEvent,
1124 cx: &mut ModelContext<Self>,
1125 ) -> Option<()> {
1126 match event {
1127 BufferEvent::Operation(operation) => {
1128 let project_id = self.remote_id()?;
1129 let request = self.client.request(proto::UpdateBuffer {
1130 project_id,
1131 buffer_id: buffer.read(cx).remote_id(),
1132 operations: vec![language::proto::serialize_operation(&operation)],
1133 });
1134 cx.background().spawn(request).detach_and_log_err(cx);
1135 }
1136 BufferEvent::Edited => {
1137 let language_server = self
1138 .language_server_for_buffer(buffer.read(cx), cx)?
1139 .clone();
1140 let buffer = buffer.read(cx);
1141 let file = File::from_dyn(buffer.file())?;
1142 let abs_path = file.as_local()?.abs_path(cx);
1143 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1144 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1145 let (version, prev_snapshot) = buffer_snapshots.last()?;
1146 let next_snapshot = buffer.text_snapshot();
1147 let next_version = version + 1;
1148
1149 let content_changes = buffer
1150 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1151 .map(|edit| {
1152 let edit_start = edit.new.start.0;
1153 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1154 let new_text = next_snapshot
1155 .text_for_range(edit.new.start.1..edit.new.end.1)
1156 .collect();
1157 lsp::TextDocumentContentChangeEvent {
1158 range: Some(lsp::Range::new(
1159 edit_start.to_lsp_position(),
1160 edit_end.to_lsp_position(),
1161 )),
1162 range_length: None,
1163 text: new_text,
1164 }
1165 })
1166 .collect();
1167
1168 buffer_snapshots.push((next_version, next_snapshot));
1169
1170 language_server
1171 .notify::<lsp::notification::DidChangeTextDocument>(
1172 lsp::DidChangeTextDocumentParams {
1173 text_document: lsp::VersionedTextDocumentIdentifier::new(
1174 uri,
1175 next_version,
1176 ),
1177 content_changes,
1178 },
1179 )
1180 .log_err();
1181 }
1182 BufferEvent::Saved => {
1183 let file = File::from_dyn(buffer.read(cx).file())?;
1184 let worktree_id = file.worktree_id(cx);
1185 let abs_path = file.as_local()?.abs_path(cx);
1186 let text_document = lsp::TextDocumentIdentifier {
1187 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1188 };
1189
1190 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1191 server
1192 .notify::<lsp::notification::DidSaveTextDocument>(
1193 lsp::DidSaveTextDocumentParams {
1194 text_document: text_document.clone(),
1195 text: None,
1196 },
1197 )
1198 .log_err();
1199 }
1200 }
1201 _ => {}
1202 }
1203
1204 None
1205 }
1206
1207 fn language_servers_for_worktree(
1208 &self,
1209 worktree_id: WorktreeId,
1210 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1211 self.language_servers.iter().filter_map(
1212 move |((language_server_worktree_id, language_name), server)| {
1213 if *language_server_worktree_id == worktree_id {
1214 Some((language_name.as_ref(), server))
1215 } else {
1216 None
1217 }
1218 },
1219 )
1220 }
1221
1222 fn assign_language_to_buffer(
1223 &mut self,
1224 buffer: &ModelHandle<Buffer>,
1225 cx: &mut ModelContext<Self>,
1226 ) -> Option<()> {
1227 // If the buffer has a language, set it and start the language server if we haven't already.
1228 let full_path = buffer.read(cx).file()?.full_path(cx);
1229 let language = self.languages.select_language(&full_path)?;
1230 buffer.update(cx, |buffer, cx| {
1231 buffer.set_language(Some(language.clone()), cx);
1232 });
1233
1234 let file = File::from_dyn(buffer.read(cx).file())?;
1235 let worktree = file.worktree.read(cx).as_local()?;
1236 let worktree_id = worktree.id();
1237 let worktree_abs_path = worktree.abs_path().clone();
1238 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1239
1240 None
1241 }
1242
1243 fn start_language_server(
1244 &mut self,
1245 worktree_id: WorktreeId,
1246 worktree_path: Arc<Path>,
1247 language: Arc<Language>,
1248 cx: &mut ModelContext<Self>,
1249 ) {
1250 let key = (worktree_id, language.name());
1251 self.started_language_servers
1252 .entry(key.clone())
1253 .or_insert_with(|| {
1254 let server_id = post_inc(&mut self.next_language_server_id);
1255 let language_server = self.languages.start_language_server(
1256 language.clone(),
1257 worktree_path,
1258 self.client.http_client(),
1259 cx,
1260 );
1261 cx.spawn_weak(|this, mut cx| async move {
1262 let mut language_server = language_server?.await.log_err()?;
1263 let this = this.upgrade(&cx)?;
1264 let (language_server_events_tx, language_server_events_rx) =
1265 smol::channel::unbounded();
1266
1267 language_server
1268 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1269 let language_server_events_tx = language_server_events_tx.clone();
1270 move |params| {
1271 language_server_events_tx
1272 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1273 .ok();
1274 }
1275 })
1276 .detach();
1277
1278 language_server
1279 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1280 let settings = this
1281 .read_with(&cx, |this, _| this.language_server_settings.clone());
1282 move |params| {
1283 let settings = settings.lock();
1284 Ok(params
1285 .items
1286 .into_iter()
1287 .map(|item| {
1288 if let Some(section) = &item.section {
1289 settings
1290 .get(section)
1291 .cloned()
1292 .unwrap_or(serde_json::Value::Null)
1293 } else {
1294 settings.clone()
1295 }
1296 })
1297 .collect())
1298 }
1299 })
1300 .detach();
1301
1302 language_server
1303 .on_notification::<lsp::notification::Progress, _>(move |params| {
1304 let token = match params.token {
1305 lsp::NumberOrString::String(token) => token,
1306 lsp::NumberOrString::Number(token) => {
1307 log::info!("skipping numeric progress token {}", token);
1308 return;
1309 }
1310 };
1311
1312 match params.value {
1313 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1314 lsp::WorkDoneProgress::Begin(_) => {
1315 language_server_events_tx
1316 .try_send(LanguageServerEvent::WorkStart { token })
1317 .ok();
1318 }
1319 lsp::WorkDoneProgress::Report(report) => {
1320 language_server_events_tx
1321 .try_send(LanguageServerEvent::WorkProgress {
1322 token,
1323 progress: LanguageServerProgress {
1324 message: report.message,
1325 percentage: report
1326 .percentage
1327 .map(|p| p as usize),
1328 last_update_at: Instant::now(),
1329 },
1330 })
1331 .ok();
1332 }
1333 lsp::WorkDoneProgress::End(_) => {
1334 language_server_events_tx
1335 .try_send(LanguageServerEvent::WorkEnd { token })
1336 .ok();
1337 }
1338 },
1339 }
1340 })
1341 .detach();
1342
1343 // Process all the LSP events.
1344 cx.spawn(|mut cx| {
1345 let this = this.downgrade();
1346 async move {
1347 while let Ok(event) = language_server_events_rx.recv().await {
1348 let this = this.upgrade(&cx)?;
1349 this.update(&mut cx, |this, cx| {
1350 this.on_lsp_event(server_id, event, &language, cx)
1351 });
1352
1353 // Don't starve the main thread when lots of events arrive all at once.
1354 smol::future::yield_now().await;
1355 }
1356 Some(())
1357 }
1358 })
1359 .detach();
1360
1361 let language_server = language_server.initialize().await.log_err()?;
1362 this.update(&mut cx, |this, cx| {
1363 this.language_servers
1364 .insert(key.clone(), language_server.clone());
1365 this.language_server_statuses.insert(
1366 server_id,
1367 LanguageServerStatus {
1368 name: language_server.name().to_string(),
1369 pending_work: Default::default(),
1370 pending_diagnostic_updates: 0,
1371 },
1372 );
1373 language_server
1374 .notify::<lsp::notification::DidChangeConfiguration>(
1375 lsp::DidChangeConfigurationParams {
1376 settings: this.language_server_settings.lock().clone(),
1377 },
1378 )
1379 .ok();
1380
1381 if let Some(project_id) = this.remote_id() {
1382 this.client
1383 .send(proto::StartLanguageServer {
1384 project_id,
1385 server: Some(proto::LanguageServer {
1386 id: server_id as u64,
1387 name: language_server.name().to_string(),
1388 }),
1389 })
1390 .log_err();
1391 }
1392
1393 // Tell the language server about every open buffer in the worktree that matches the language.
1394 for buffer in this.opened_buffers.values() {
1395 if let Some(buffer_handle) = buffer.upgrade(cx) {
1396 let buffer = buffer_handle.read(cx);
1397 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1398 file
1399 } else {
1400 continue;
1401 };
1402 let language = if let Some(language) = buffer.language() {
1403 language
1404 } else {
1405 continue;
1406 };
1407 if (file.worktree.read(cx).id(), language.name()) != key {
1408 continue;
1409 }
1410
1411 let file = file.as_local()?;
1412 let versions = this
1413 .buffer_snapshots
1414 .entry(buffer.remote_id())
1415 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1416 let (version, initial_snapshot) = versions.last().unwrap();
1417 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1418 language_server
1419 .notify::<lsp::notification::DidOpenTextDocument>(
1420 lsp::DidOpenTextDocumentParams {
1421 text_document: lsp::TextDocumentItem::new(
1422 uri,
1423 Default::default(),
1424 *version,
1425 initial_snapshot.text(),
1426 ),
1427 },
1428 )
1429 .log_err()?;
1430 buffer_handle.update(cx, |buffer, cx| {
1431 buffer.set_completion_triggers(
1432 language_server
1433 .capabilities()
1434 .completion_provider
1435 .as_ref()
1436 .and_then(|provider| {
1437 provider.trigger_characters.clone()
1438 })
1439 .unwrap_or(Vec::new()),
1440 cx,
1441 )
1442 });
1443 }
1444 }
1445
1446 cx.notify();
1447 Some(())
1448 });
1449
1450 Some(language_server)
1451 })
1452 });
1453 }
1454
1455 fn on_lsp_event(
1456 &mut self,
1457 language_server_id: usize,
1458 event: LanguageServerEvent,
1459 language: &Arc<Language>,
1460 cx: &mut ModelContext<Self>,
1461 ) {
1462 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1463 let language_server_status =
1464 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1465 status
1466 } else {
1467 return;
1468 };
1469
1470 match event {
1471 LanguageServerEvent::WorkStart { token } => {
1472 if Some(&token) == disk_diagnostics_token {
1473 language_server_status.pending_diagnostic_updates += 1;
1474 if language_server_status.pending_diagnostic_updates == 1 {
1475 self.disk_based_diagnostics_started(cx);
1476 self.broadcast_language_server_update(
1477 language_server_id,
1478 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1479 proto::LspDiskBasedDiagnosticsUpdating {},
1480 ),
1481 );
1482 }
1483 } else {
1484 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1485 self.broadcast_language_server_update(
1486 language_server_id,
1487 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1488 token,
1489 }),
1490 );
1491 }
1492 }
1493 LanguageServerEvent::WorkProgress { token, progress } => {
1494 if Some(&token) != disk_diagnostics_token {
1495 self.on_lsp_work_progress(
1496 language_server_id,
1497 token.clone(),
1498 progress.clone(),
1499 cx,
1500 );
1501 self.broadcast_language_server_update(
1502 language_server_id,
1503 proto::update_language_server::Variant::WorkProgress(
1504 proto::LspWorkProgress {
1505 token,
1506 message: progress.message,
1507 percentage: progress.percentage.map(|p| p as u32),
1508 },
1509 ),
1510 );
1511 }
1512 }
1513 LanguageServerEvent::WorkEnd { token } => {
1514 if Some(&token) == disk_diagnostics_token {
1515 language_server_status.pending_diagnostic_updates -= 1;
1516 if language_server_status.pending_diagnostic_updates == 0 {
1517 self.disk_based_diagnostics_finished(cx);
1518 self.broadcast_language_server_update(
1519 language_server_id,
1520 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1521 proto::LspDiskBasedDiagnosticsUpdated {},
1522 ),
1523 );
1524 }
1525 } else {
1526 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1527 self.broadcast_language_server_update(
1528 language_server_id,
1529 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1530 token,
1531 }),
1532 );
1533 }
1534 }
1535 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1536 language.process_diagnostics(&mut params);
1537
1538 if disk_diagnostics_token.is_none() {
1539 self.disk_based_diagnostics_started(cx);
1540 self.broadcast_language_server_update(
1541 language_server_id,
1542 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1543 proto::LspDiskBasedDiagnosticsUpdating {},
1544 ),
1545 );
1546 }
1547 self.update_diagnostics(
1548 params,
1549 language
1550 .disk_based_diagnostic_sources()
1551 .unwrap_or(&Default::default()),
1552 cx,
1553 )
1554 .log_err();
1555 if disk_diagnostics_token.is_none() {
1556 self.disk_based_diagnostics_finished(cx);
1557 self.broadcast_language_server_update(
1558 language_server_id,
1559 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1560 proto::LspDiskBasedDiagnosticsUpdated {},
1561 ),
1562 );
1563 }
1564 }
1565 }
1566 }
1567
1568 fn on_lsp_work_start(
1569 &mut self,
1570 language_server_id: usize,
1571 token: String,
1572 cx: &mut ModelContext<Self>,
1573 ) {
1574 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1575 status.pending_work.insert(
1576 token,
1577 LanguageServerProgress {
1578 message: None,
1579 percentage: None,
1580 last_update_at: Instant::now(),
1581 },
1582 );
1583 cx.notify();
1584 }
1585 }
1586
1587 fn on_lsp_work_progress(
1588 &mut self,
1589 language_server_id: usize,
1590 token: String,
1591 progress: LanguageServerProgress,
1592 cx: &mut ModelContext<Self>,
1593 ) {
1594 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1595 status.pending_work.insert(token, progress);
1596 cx.notify();
1597 }
1598 }
1599
1600 fn on_lsp_work_end(
1601 &mut self,
1602 language_server_id: usize,
1603 token: String,
1604 cx: &mut ModelContext<Self>,
1605 ) {
1606 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1607 status.pending_work.remove(&token);
1608 cx.notify();
1609 }
1610 }
1611
1612 fn broadcast_language_server_update(
1613 &self,
1614 language_server_id: usize,
1615 event: proto::update_language_server::Variant,
1616 ) {
1617 if let Some(project_id) = self.remote_id() {
1618 self.client
1619 .send(proto::UpdateLanguageServer {
1620 project_id,
1621 language_server_id: language_server_id as u64,
1622 variant: Some(event),
1623 })
1624 .log_err();
1625 }
1626 }
1627
1628 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1629 for server in self.language_servers.values() {
1630 server
1631 .notify::<lsp::notification::DidChangeConfiguration>(
1632 lsp::DidChangeConfigurationParams {
1633 settings: settings.clone(),
1634 },
1635 )
1636 .ok();
1637 }
1638 *self.language_server_settings.lock() = settings;
1639 }
1640
1641 pub fn language_server_statuses(
1642 &self,
1643 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1644 self.language_server_statuses.values()
1645 }
1646
1647 pub fn update_diagnostics(
1648 &mut self,
1649 params: lsp::PublishDiagnosticsParams,
1650 disk_based_sources: &HashSet<String>,
1651 cx: &mut ModelContext<Self>,
1652 ) -> Result<()> {
1653 let abs_path = params
1654 .uri
1655 .to_file_path()
1656 .map_err(|_| anyhow!("URI is not a file"))?;
1657 let mut next_group_id = 0;
1658 let mut diagnostics = Vec::default();
1659 let mut primary_diagnostic_group_ids = HashMap::default();
1660 let mut sources_by_group_id = HashMap::default();
1661 let mut supporting_diagnostics = HashMap::default();
1662 for diagnostic in ¶ms.diagnostics {
1663 let source = diagnostic.source.as_ref();
1664 let code = diagnostic.code.as_ref().map(|code| match code {
1665 lsp::NumberOrString::Number(code) => code.to_string(),
1666 lsp::NumberOrString::String(code) => code.clone(),
1667 });
1668 let range = range_from_lsp(diagnostic.range);
1669 let is_supporting = diagnostic
1670 .related_information
1671 .as_ref()
1672 .map_or(false, |infos| {
1673 infos.iter().any(|info| {
1674 primary_diagnostic_group_ids.contains_key(&(
1675 source,
1676 code.clone(),
1677 range_from_lsp(info.location.range),
1678 ))
1679 })
1680 });
1681
1682 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1683 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1684 });
1685
1686 if is_supporting {
1687 supporting_diagnostics.insert(
1688 (source, code.clone(), range),
1689 (diagnostic.severity, is_unnecessary),
1690 );
1691 } else {
1692 let group_id = post_inc(&mut next_group_id);
1693 let is_disk_based =
1694 source.map_or(false, |source| disk_based_sources.contains(source));
1695
1696 sources_by_group_id.insert(group_id, source);
1697 primary_diagnostic_group_ids
1698 .insert((source, code.clone(), range.clone()), group_id);
1699
1700 diagnostics.push(DiagnosticEntry {
1701 range,
1702 diagnostic: Diagnostic {
1703 code: code.clone(),
1704 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1705 message: diagnostic.message.clone(),
1706 group_id,
1707 is_primary: true,
1708 is_valid: true,
1709 is_disk_based,
1710 is_unnecessary,
1711 },
1712 });
1713 if let Some(infos) = &diagnostic.related_information {
1714 for info in infos {
1715 if info.location.uri == params.uri && !info.message.is_empty() {
1716 let range = range_from_lsp(info.location.range);
1717 diagnostics.push(DiagnosticEntry {
1718 range,
1719 diagnostic: Diagnostic {
1720 code: code.clone(),
1721 severity: DiagnosticSeverity::INFORMATION,
1722 message: info.message.clone(),
1723 group_id,
1724 is_primary: false,
1725 is_valid: true,
1726 is_disk_based,
1727 is_unnecessary: false,
1728 },
1729 });
1730 }
1731 }
1732 }
1733 }
1734 }
1735
1736 for entry in &mut diagnostics {
1737 let diagnostic = &mut entry.diagnostic;
1738 if !diagnostic.is_primary {
1739 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1740 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1741 source,
1742 diagnostic.code.clone(),
1743 entry.range.clone(),
1744 )) {
1745 if let Some(severity) = severity {
1746 diagnostic.severity = severity;
1747 }
1748 diagnostic.is_unnecessary = is_unnecessary;
1749 }
1750 }
1751 }
1752
1753 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1754 Ok(())
1755 }
1756
1757 pub fn update_diagnostic_entries(
1758 &mut self,
1759 abs_path: PathBuf,
1760 version: Option<i32>,
1761 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1762 cx: &mut ModelContext<Project>,
1763 ) -> Result<(), anyhow::Error> {
1764 let (worktree, relative_path) = self
1765 .find_local_worktree(&abs_path, cx)
1766 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1767 if !worktree.read(cx).is_visible() {
1768 return Ok(());
1769 }
1770
1771 let project_path = ProjectPath {
1772 worktree_id: worktree.read(cx).id(),
1773 path: relative_path.into(),
1774 };
1775
1776 for buffer in self.opened_buffers.values() {
1777 if let Some(buffer) = buffer.upgrade(cx) {
1778 if buffer
1779 .read(cx)
1780 .file()
1781 .map_or(false, |file| *file.path() == project_path.path)
1782 {
1783 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1784 break;
1785 }
1786 }
1787 }
1788 worktree.update(cx, |worktree, cx| {
1789 worktree
1790 .as_local_mut()
1791 .ok_or_else(|| anyhow!("not a local worktree"))?
1792 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1793 })?;
1794 cx.emit(Event::DiagnosticsUpdated(project_path));
1795 Ok(())
1796 }
1797
1798 fn update_buffer_diagnostics(
1799 &mut self,
1800 buffer: &ModelHandle<Buffer>,
1801 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1802 version: Option<i32>,
1803 cx: &mut ModelContext<Self>,
1804 ) -> Result<()> {
1805 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1806 Ordering::Equal
1807 .then_with(|| b.is_primary.cmp(&a.is_primary))
1808 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1809 .then_with(|| a.severity.cmp(&b.severity))
1810 .then_with(|| a.message.cmp(&b.message))
1811 }
1812
1813 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1814
1815 diagnostics.sort_unstable_by(|a, b| {
1816 Ordering::Equal
1817 .then_with(|| a.range.start.cmp(&b.range.start))
1818 .then_with(|| b.range.end.cmp(&a.range.end))
1819 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1820 });
1821
1822 let mut sanitized_diagnostics = Vec::new();
1823 let mut edits_since_save = snapshot
1824 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1825 .peekable();
1826 let mut last_edit_old_end = PointUtf16::zero();
1827 let mut last_edit_new_end = PointUtf16::zero();
1828 'outer: for entry in diagnostics {
1829 let mut start = entry.range.start;
1830 let mut end = entry.range.end;
1831
1832 // Some diagnostics are based on files on disk instead of buffers'
1833 // current contents. Adjust these diagnostics' ranges to reflect
1834 // any unsaved edits.
1835 if entry.diagnostic.is_disk_based {
1836 while let Some(edit) = edits_since_save.peek() {
1837 if edit.old.end <= start {
1838 last_edit_old_end = edit.old.end;
1839 last_edit_new_end = edit.new.end;
1840 edits_since_save.next();
1841 } else if edit.old.start <= end && edit.old.end >= start {
1842 continue 'outer;
1843 } else {
1844 break;
1845 }
1846 }
1847
1848 let start_overshoot = start - last_edit_old_end;
1849 start = last_edit_new_end;
1850 start += start_overshoot;
1851
1852 let end_overshoot = end - last_edit_old_end;
1853 end = last_edit_new_end;
1854 end += end_overshoot;
1855 }
1856
1857 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1858 ..snapshot.clip_point_utf16(end, Bias::Right);
1859
1860 // Expand empty ranges by one character
1861 if range.start == range.end {
1862 range.end.column += 1;
1863 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1864 if range.start == range.end && range.end.column > 0 {
1865 range.start.column -= 1;
1866 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1867 }
1868 }
1869
1870 sanitized_diagnostics.push(DiagnosticEntry {
1871 range,
1872 diagnostic: entry.diagnostic,
1873 });
1874 }
1875 drop(edits_since_save);
1876
1877 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1878 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1879 Ok(())
1880 }
1881
1882 pub fn format(
1883 &self,
1884 buffers: HashSet<ModelHandle<Buffer>>,
1885 push_to_history: bool,
1886 cx: &mut ModelContext<Project>,
1887 ) -> Task<Result<ProjectTransaction>> {
1888 let mut local_buffers = Vec::new();
1889 let mut remote_buffers = None;
1890 for buffer_handle in buffers {
1891 let buffer = buffer_handle.read(cx);
1892 if let Some(file) = File::from_dyn(buffer.file()) {
1893 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1894 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1895 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1896 }
1897 } else {
1898 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1899 }
1900 } else {
1901 return Task::ready(Ok(Default::default()));
1902 }
1903 }
1904
1905 let remote_buffers = self.remote_id().zip(remote_buffers);
1906 let client = self.client.clone();
1907
1908 cx.spawn(|this, mut cx| async move {
1909 let mut project_transaction = ProjectTransaction::default();
1910
1911 if let Some((project_id, remote_buffers)) = remote_buffers {
1912 let response = client
1913 .request(proto::FormatBuffers {
1914 project_id,
1915 buffer_ids: remote_buffers
1916 .iter()
1917 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1918 .collect(),
1919 })
1920 .await?
1921 .transaction
1922 .ok_or_else(|| anyhow!("missing transaction"))?;
1923 project_transaction = this
1924 .update(&mut cx, |this, cx| {
1925 this.deserialize_project_transaction(response, push_to_history, cx)
1926 })
1927 .await?;
1928 }
1929
1930 for (buffer, buffer_abs_path, language_server) in local_buffers {
1931 let text_document = lsp::TextDocumentIdentifier::new(
1932 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1933 );
1934 let capabilities = &language_server.capabilities();
1935 let lsp_edits = if capabilities
1936 .document_formatting_provider
1937 .as_ref()
1938 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1939 {
1940 language_server
1941 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1942 text_document,
1943 options: Default::default(),
1944 work_done_progress_params: Default::default(),
1945 })
1946 .await?
1947 } else if capabilities
1948 .document_range_formatting_provider
1949 .as_ref()
1950 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1951 {
1952 let buffer_start = lsp::Position::new(0, 0);
1953 let buffer_end = buffer
1954 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1955 .to_lsp_position();
1956 language_server
1957 .request::<lsp::request::RangeFormatting>(
1958 lsp::DocumentRangeFormattingParams {
1959 text_document,
1960 range: lsp::Range::new(buffer_start, buffer_end),
1961 options: Default::default(),
1962 work_done_progress_params: Default::default(),
1963 },
1964 )
1965 .await?
1966 } else {
1967 continue;
1968 };
1969
1970 if let Some(lsp_edits) = lsp_edits {
1971 let edits = this
1972 .update(&mut cx, |this, cx| {
1973 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1974 })
1975 .await?;
1976 buffer.update(&mut cx, |buffer, cx| {
1977 buffer.finalize_last_transaction();
1978 buffer.start_transaction();
1979 for (range, text) in edits {
1980 buffer.edit([range], text, cx);
1981 }
1982 if buffer.end_transaction(cx).is_some() {
1983 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1984 if !push_to_history {
1985 buffer.forget_transaction(transaction.id);
1986 }
1987 project_transaction.0.insert(cx.handle(), transaction);
1988 }
1989 });
1990 }
1991 }
1992
1993 Ok(project_transaction)
1994 })
1995 }
1996
1997 pub fn definition<T: ToPointUtf16>(
1998 &self,
1999 buffer: &ModelHandle<Buffer>,
2000 position: T,
2001 cx: &mut ModelContext<Self>,
2002 ) -> Task<Result<Vec<Location>>> {
2003 let position = position.to_point_utf16(buffer.read(cx));
2004 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2005 }
2006
2007 pub fn references<T: ToPointUtf16>(
2008 &self,
2009 buffer: &ModelHandle<Buffer>,
2010 position: T,
2011 cx: &mut ModelContext<Self>,
2012 ) -> Task<Result<Vec<Location>>> {
2013 let position = position.to_point_utf16(buffer.read(cx));
2014 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2015 }
2016
2017 pub fn document_highlights<T: ToPointUtf16>(
2018 &self,
2019 buffer: &ModelHandle<Buffer>,
2020 position: T,
2021 cx: &mut ModelContext<Self>,
2022 ) -> Task<Result<Vec<DocumentHighlight>>> {
2023 let position = position.to_point_utf16(buffer.read(cx));
2024
2025 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2026 }
2027
2028 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2029 if self.is_local() {
2030 let mut language_servers = HashMap::default();
2031 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2032 if let Some((worktree, language)) = self
2033 .worktree_for_id(*worktree_id, cx)
2034 .and_then(|worktree| worktree.read(cx).as_local())
2035 .zip(self.languages.get_language(language_name))
2036 {
2037 language_servers
2038 .entry(Arc::as_ptr(language_server))
2039 .or_insert((
2040 language_server.clone(),
2041 *worktree_id,
2042 worktree.abs_path().clone(),
2043 language.clone(),
2044 ));
2045 }
2046 }
2047
2048 let mut requests = Vec::new();
2049 for (language_server, _, _, _) in language_servers.values() {
2050 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2051 lsp::WorkspaceSymbolParams {
2052 query: query.to_string(),
2053 ..Default::default()
2054 },
2055 ));
2056 }
2057
2058 cx.spawn_weak(|this, cx| async move {
2059 let responses = futures::future::try_join_all(requests).await?;
2060
2061 let mut symbols = Vec::new();
2062 if let Some(this) = this.upgrade(&cx) {
2063 this.read_with(&cx, |this, cx| {
2064 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2065 language_servers.into_values().zip(responses)
2066 {
2067 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2068 |lsp_symbol| {
2069 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2070 let mut worktree_id = source_worktree_id;
2071 let path;
2072 if let Some((worktree, rel_path)) =
2073 this.find_local_worktree(&abs_path, cx)
2074 {
2075 worktree_id = worktree.read(cx).id();
2076 path = rel_path;
2077 } else {
2078 path = relativize_path(&worktree_abs_path, &abs_path);
2079 }
2080
2081 let label = language
2082 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2083 .unwrap_or_else(|| {
2084 CodeLabel::plain(lsp_symbol.name.clone(), None)
2085 });
2086 let signature = this.symbol_signature(worktree_id, &path);
2087
2088 Some(Symbol {
2089 source_worktree_id,
2090 worktree_id,
2091 language_name: language.name().to_string(),
2092 name: lsp_symbol.name,
2093 kind: lsp_symbol.kind,
2094 label,
2095 path,
2096 range: range_from_lsp(lsp_symbol.location.range),
2097 signature,
2098 })
2099 },
2100 ));
2101 }
2102 })
2103 }
2104
2105 Ok(symbols)
2106 })
2107 } else if let Some(project_id) = self.remote_id() {
2108 let request = self.client.request(proto::GetProjectSymbols {
2109 project_id,
2110 query: query.to_string(),
2111 });
2112 cx.spawn_weak(|this, cx| async move {
2113 let response = request.await?;
2114 let mut symbols = Vec::new();
2115 if let Some(this) = this.upgrade(&cx) {
2116 this.read_with(&cx, |this, _| {
2117 symbols.extend(
2118 response
2119 .symbols
2120 .into_iter()
2121 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2122 );
2123 })
2124 }
2125 Ok(symbols)
2126 })
2127 } else {
2128 Task::ready(Ok(Default::default()))
2129 }
2130 }
2131
2132 pub fn open_buffer_for_symbol(
2133 &mut self,
2134 symbol: &Symbol,
2135 cx: &mut ModelContext<Self>,
2136 ) -> Task<Result<ModelHandle<Buffer>>> {
2137 if self.is_local() {
2138 let language_server = if let Some(server) = self.language_servers.get(&(
2139 symbol.source_worktree_id,
2140 Arc::from(symbol.language_name.as_str()),
2141 )) {
2142 server.clone()
2143 } else {
2144 return Task::ready(Err(anyhow!(
2145 "language server for worktree and language not found"
2146 )));
2147 };
2148
2149 let worktree_abs_path = if let Some(worktree_abs_path) = self
2150 .worktree_for_id(symbol.worktree_id, cx)
2151 .and_then(|worktree| worktree.read(cx).as_local())
2152 .map(|local_worktree| local_worktree.abs_path())
2153 {
2154 worktree_abs_path
2155 } else {
2156 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2157 };
2158 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2159 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2160 uri
2161 } else {
2162 return Task::ready(Err(anyhow!("invalid symbol path")));
2163 };
2164
2165 self.open_local_buffer_via_lsp(
2166 symbol_uri,
2167 Arc::from(symbol.language_name.as_str()),
2168 language_server,
2169 cx,
2170 )
2171 } else if let Some(project_id) = self.remote_id() {
2172 let request = self.client.request(proto::OpenBufferForSymbol {
2173 project_id,
2174 symbol: Some(serialize_symbol(symbol)),
2175 });
2176 cx.spawn(|this, mut cx| async move {
2177 let response = request.await?;
2178 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2179 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2180 .await
2181 })
2182 } else {
2183 Task::ready(Err(anyhow!("project does not have a remote id")))
2184 }
2185 }
2186
2187 pub fn completions<T: ToPointUtf16>(
2188 &self,
2189 source_buffer_handle: &ModelHandle<Buffer>,
2190 position: T,
2191 cx: &mut ModelContext<Self>,
2192 ) -> Task<Result<Vec<Completion>>> {
2193 let source_buffer_handle = source_buffer_handle.clone();
2194 let source_buffer = source_buffer_handle.read(cx);
2195 let buffer_id = source_buffer.remote_id();
2196 let language = source_buffer.language().cloned();
2197 let worktree;
2198 let buffer_abs_path;
2199 if let Some(file) = File::from_dyn(source_buffer.file()) {
2200 worktree = file.worktree.clone();
2201 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2202 } else {
2203 return Task::ready(Ok(Default::default()));
2204 };
2205
2206 let position = position.to_point_utf16(source_buffer);
2207 let anchor = source_buffer.anchor_after(position);
2208
2209 if worktree.read(cx).as_local().is_some() {
2210 let buffer_abs_path = buffer_abs_path.unwrap();
2211 let lang_server =
2212 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2213 server.clone()
2214 } else {
2215 return Task::ready(Ok(Default::default()));
2216 };
2217
2218 cx.spawn(|_, cx| async move {
2219 let completions = lang_server
2220 .request::<lsp::request::Completion>(lsp::CompletionParams {
2221 text_document_position: lsp::TextDocumentPositionParams::new(
2222 lsp::TextDocumentIdentifier::new(
2223 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2224 ),
2225 position.to_lsp_position(),
2226 ),
2227 context: Default::default(),
2228 work_done_progress_params: Default::default(),
2229 partial_result_params: Default::default(),
2230 })
2231 .await
2232 .context("lsp completion request failed")?;
2233
2234 let completions = if let Some(completions) = completions {
2235 match completions {
2236 lsp::CompletionResponse::Array(completions) => completions,
2237 lsp::CompletionResponse::List(list) => list.items,
2238 }
2239 } else {
2240 Default::default()
2241 };
2242
2243 source_buffer_handle.read_with(&cx, |this, _| {
2244 Ok(completions
2245 .into_iter()
2246 .filter_map(|lsp_completion| {
2247 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2248 lsp::CompletionTextEdit::Edit(edit) => {
2249 (range_from_lsp(edit.range), edit.new_text.clone())
2250 }
2251 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2252 log::info!("unsupported insert/replace completion");
2253 return None;
2254 }
2255 };
2256
2257 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2258 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2259 if clipped_start == old_range.start && clipped_end == old_range.end {
2260 Some(Completion {
2261 old_range: this.anchor_before(old_range.start)
2262 ..this.anchor_after(old_range.end),
2263 new_text,
2264 label: language
2265 .as_ref()
2266 .and_then(|l| l.label_for_completion(&lsp_completion))
2267 .unwrap_or_else(|| {
2268 CodeLabel::plain(
2269 lsp_completion.label.clone(),
2270 lsp_completion.filter_text.as_deref(),
2271 )
2272 }),
2273 lsp_completion,
2274 })
2275 } else {
2276 None
2277 }
2278 })
2279 .collect())
2280 })
2281 })
2282 } else if let Some(project_id) = self.remote_id() {
2283 let rpc = self.client.clone();
2284 let message = proto::GetCompletions {
2285 project_id,
2286 buffer_id,
2287 position: Some(language::proto::serialize_anchor(&anchor)),
2288 version: serialize_version(&source_buffer.version()),
2289 };
2290 cx.spawn_weak(|_, mut cx| async move {
2291 let response = rpc.request(message).await?;
2292
2293 source_buffer_handle
2294 .update(&mut cx, |buffer, _| {
2295 buffer.wait_for_version(deserialize_version(response.version))
2296 })
2297 .await;
2298
2299 response
2300 .completions
2301 .into_iter()
2302 .map(|completion| {
2303 language::proto::deserialize_completion(completion, language.as_ref())
2304 })
2305 .collect()
2306 })
2307 } else {
2308 Task::ready(Ok(Default::default()))
2309 }
2310 }
2311
2312 pub fn apply_additional_edits_for_completion(
2313 &self,
2314 buffer_handle: ModelHandle<Buffer>,
2315 completion: Completion,
2316 push_to_history: bool,
2317 cx: &mut ModelContext<Self>,
2318 ) -> Task<Result<Option<Transaction>>> {
2319 let buffer = buffer_handle.read(cx);
2320 let buffer_id = buffer.remote_id();
2321
2322 if self.is_local() {
2323 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2324 server.clone()
2325 } else {
2326 return Task::ready(Ok(Default::default()));
2327 };
2328
2329 cx.spawn(|this, mut cx| async move {
2330 let resolved_completion = lang_server
2331 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2332 .await?;
2333 if let Some(edits) = resolved_completion.additional_text_edits {
2334 let edits = this
2335 .update(&mut cx, |this, cx| {
2336 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2337 })
2338 .await?;
2339 buffer_handle.update(&mut cx, |buffer, cx| {
2340 buffer.finalize_last_transaction();
2341 buffer.start_transaction();
2342 for (range, text) in edits {
2343 buffer.edit([range], text, cx);
2344 }
2345 let transaction = if buffer.end_transaction(cx).is_some() {
2346 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2347 if !push_to_history {
2348 buffer.forget_transaction(transaction.id);
2349 }
2350 Some(transaction)
2351 } else {
2352 None
2353 };
2354 Ok(transaction)
2355 })
2356 } else {
2357 Ok(None)
2358 }
2359 })
2360 } else if let Some(project_id) = self.remote_id() {
2361 let client = self.client.clone();
2362 cx.spawn(|_, mut cx| async move {
2363 let response = client
2364 .request(proto::ApplyCompletionAdditionalEdits {
2365 project_id,
2366 buffer_id,
2367 completion: Some(language::proto::serialize_completion(&completion)),
2368 })
2369 .await?;
2370
2371 if let Some(transaction) = response.transaction {
2372 let transaction = language::proto::deserialize_transaction(transaction)?;
2373 buffer_handle
2374 .update(&mut cx, |buffer, _| {
2375 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2376 })
2377 .await;
2378 if push_to_history {
2379 buffer_handle.update(&mut cx, |buffer, _| {
2380 buffer.push_transaction(transaction.clone(), Instant::now());
2381 });
2382 }
2383 Ok(Some(transaction))
2384 } else {
2385 Ok(None)
2386 }
2387 })
2388 } else {
2389 Task::ready(Err(anyhow!("project does not have a remote id")))
2390 }
2391 }
2392
2393 pub fn code_actions<T: ToOffset>(
2394 &self,
2395 buffer_handle: &ModelHandle<Buffer>,
2396 range: Range<T>,
2397 cx: &mut ModelContext<Self>,
2398 ) -> Task<Result<Vec<CodeAction>>> {
2399 let buffer_handle = buffer_handle.clone();
2400 let buffer = buffer_handle.read(cx);
2401 let buffer_id = buffer.remote_id();
2402 let worktree;
2403 let buffer_abs_path;
2404 if let Some(file) = File::from_dyn(buffer.file()) {
2405 worktree = file.worktree.clone();
2406 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2407 } else {
2408 return Task::ready(Ok(Default::default()));
2409 };
2410 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2411
2412 if worktree.read(cx).as_local().is_some() {
2413 let buffer_abs_path = buffer_abs_path.unwrap();
2414 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2415 server.clone()
2416 } else {
2417 return Task::ready(Ok(Default::default()));
2418 };
2419
2420 let lsp_range = lsp::Range::new(
2421 range.start.to_point_utf16(buffer).to_lsp_position(),
2422 range.end.to_point_utf16(buffer).to_lsp_position(),
2423 );
2424 cx.foreground().spawn(async move {
2425 if !lang_server.capabilities().code_action_provider.is_some() {
2426 return Ok(Default::default());
2427 }
2428
2429 Ok(lang_server
2430 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2431 text_document: lsp::TextDocumentIdentifier::new(
2432 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2433 ),
2434 range: lsp_range,
2435 work_done_progress_params: Default::default(),
2436 partial_result_params: Default::default(),
2437 context: lsp::CodeActionContext {
2438 diagnostics: Default::default(),
2439 only: Some(vec![
2440 lsp::CodeActionKind::QUICKFIX,
2441 lsp::CodeActionKind::REFACTOR,
2442 lsp::CodeActionKind::REFACTOR_EXTRACT,
2443 ]),
2444 },
2445 })
2446 .await?
2447 .unwrap_or_default()
2448 .into_iter()
2449 .filter_map(|entry| {
2450 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2451 Some(CodeAction {
2452 range: range.clone(),
2453 lsp_action,
2454 })
2455 } else {
2456 None
2457 }
2458 })
2459 .collect())
2460 })
2461 } else if let Some(project_id) = self.remote_id() {
2462 let rpc = self.client.clone();
2463 let version = buffer.version();
2464 cx.spawn_weak(|_, mut cx| async move {
2465 let response = rpc
2466 .request(proto::GetCodeActions {
2467 project_id,
2468 buffer_id,
2469 start: Some(language::proto::serialize_anchor(&range.start)),
2470 end: Some(language::proto::serialize_anchor(&range.end)),
2471 version: serialize_version(&version),
2472 })
2473 .await?;
2474
2475 buffer_handle
2476 .update(&mut cx, |buffer, _| {
2477 buffer.wait_for_version(deserialize_version(response.version))
2478 })
2479 .await;
2480
2481 response
2482 .actions
2483 .into_iter()
2484 .map(language::proto::deserialize_code_action)
2485 .collect()
2486 })
2487 } else {
2488 Task::ready(Ok(Default::default()))
2489 }
2490 }
2491
2492 pub fn apply_code_action(
2493 &self,
2494 buffer_handle: ModelHandle<Buffer>,
2495 mut action: CodeAction,
2496 push_to_history: bool,
2497 cx: &mut ModelContext<Self>,
2498 ) -> Task<Result<ProjectTransaction>> {
2499 if self.is_local() {
2500 let buffer = buffer_handle.read(cx);
2501 let lang_name = if let Some(lang) = buffer.language() {
2502 lang.name()
2503 } else {
2504 return Task::ready(Ok(Default::default()));
2505 };
2506 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2507 server.clone()
2508 } else {
2509 return Task::ready(Ok(Default::default()));
2510 };
2511 let range = action.range.to_point_utf16(buffer);
2512
2513 cx.spawn(|this, mut cx| async move {
2514 if let Some(lsp_range) = action
2515 .lsp_action
2516 .data
2517 .as_mut()
2518 .and_then(|d| d.get_mut("codeActionParams"))
2519 .and_then(|d| d.get_mut("range"))
2520 {
2521 *lsp_range = serde_json::to_value(&lsp::Range::new(
2522 range.start.to_lsp_position(),
2523 range.end.to_lsp_position(),
2524 ))
2525 .unwrap();
2526 action.lsp_action = lang_server
2527 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2528 .await?;
2529 } else {
2530 let actions = this
2531 .update(&mut cx, |this, cx| {
2532 this.code_actions(&buffer_handle, action.range, cx)
2533 })
2534 .await?;
2535 action.lsp_action = actions
2536 .into_iter()
2537 .find(|a| a.lsp_action.title == action.lsp_action.title)
2538 .ok_or_else(|| anyhow!("code action is outdated"))?
2539 .lsp_action;
2540 }
2541
2542 if let Some(edit) = action.lsp_action.edit {
2543 Self::deserialize_workspace_edit(
2544 this,
2545 edit,
2546 push_to_history,
2547 lang_name,
2548 lang_server,
2549 &mut cx,
2550 )
2551 .await
2552 } else {
2553 Ok(ProjectTransaction::default())
2554 }
2555 })
2556 } else if let Some(project_id) = self.remote_id() {
2557 let client = self.client.clone();
2558 let request = proto::ApplyCodeAction {
2559 project_id,
2560 buffer_id: buffer_handle.read(cx).remote_id(),
2561 action: Some(language::proto::serialize_code_action(&action)),
2562 };
2563 cx.spawn(|this, mut cx| async move {
2564 let response = client
2565 .request(request)
2566 .await?
2567 .transaction
2568 .ok_or_else(|| anyhow!("missing transaction"))?;
2569 this.update(&mut cx, |this, cx| {
2570 this.deserialize_project_transaction(response, push_to_history, cx)
2571 })
2572 .await
2573 })
2574 } else {
2575 Task::ready(Err(anyhow!("project does not have a remote id")))
2576 }
2577 }
2578
2579 async fn deserialize_workspace_edit(
2580 this: ModelHandle<Self>,
2581 edit: lsp::WorkspaceEdit,
2582 push_to_history: bool,
2583 language_name: Arc<str>,
2584 language_server: Arc<LanguageServer>,
2585 cx: &mut AsyncAppContext,
2586 ) -> Result<ProjectTransaction> {
2587 let fs = this.read_with(cx, |this, _| this.fs.clone());
2588 let mut operations = Vec::new();
2589 if let Some(document_changes) = edit.document_changes {
2590 match document_changes {
2591 lsp::DocumentChanges::Edits(edits) => {
2592 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2593 }
2594 lsp::DocumentChanges::Operations(ops) => operations = ops,
2595 }
2596 } else if let Some(changes) = edit.changes {
2597 operations.extend(changes.into_iter().map(|(uri, edits)| {
2598 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2599 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2600 uri,
2601 version: None,
2602 },
2603 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2604 })
2605 }));
2606 }
2607
2608 let mut project_transaction = ProjectTransaction::default();
2609 for operation in operations {
2610 match operation {
2611 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2612 let abs_path = op
2613 .uri
2614 .to_file_path()
2615 .map_err(|_| anyhow!("can't convert URI to path"))?;
2616
2617 if let Some(parent_path) = abs_path.parent() {
2618 fs.create_dir(parent_path).await?;
2619 }
2620 if abs_path.ends_with("/") {
2621 fs.create_dir(&abs_path).await?;
2622 } else {
2623 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2624 .await?;
2625 }
2626 }
2627 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2628 let source_abs_path = op
2629 .old_uri
2630 .to_file_path()
2631 .map_err(|_| anyhow!("can't convert URI to path"))?;
2632 let target_abs_path = op
2633 .new_uri
2634 .to_file_path()
2635 .map_err(|_| anyhow!("can't convert URI to path"))?;
2636 fs.rename(
2637 &source_abs_path,
2638 &target_abs_path,
2639 op.options.map(Into::into).unwrap_or_default(),
2640 )
2641 .await?;
2642 }
2643 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2644 let abs_path = op
2645 .uri
2646 .to_file_path()
2647 .map_err(|_| anyhow!("can't convert URI to path"))?;
2648 let options = op.options.map(Into::into).unwrap_or_default();
2649 if abs_path.ends_with("/") {
2650 fs.remove_dir(&abs_path, options).await?;
2651 } else {
2652 fs.remove_file(&abs_path, options).await?;
2653 }
2654 }
2655 lsp::DocumentChangeOperation::Edit(op) => {
2656 let buffer_to_edit = this
2657 .update(cx, |this, cx| {
2658 this.open_local_buffer_via_lsp(
2659 op.text_document.uri,
2660 language_name.clone(),
2661 language_server.clone(),
2662 cx,
2663 )
2664 })
2665 .await?;
2666
2667 let edits = this
2668 .update(cx, |this, cx| {
2669 let edits = op.edits.into_iter().map(|edit| match edit {
2670 lsp::OneOf::Left(edit) => edit,
2671 lsp::OneOf::Right(edit) => edit.text_edit,
2672 });
2673 this.edits_from_lsp(
2674 &buffer_to_edit,
2675 edits,
2676 op.text_document.version,
2677 cx,
2678 )
2679 })
2680 .await?;
2681
2682 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2683 buffer.finalize_last_transaction();
2684 buffer.start_transaction();
2685 for (range, text) in edits {
2686 buffer.edit([range], text, cx);
2687 }
2688 let transaction = if buffer.end_transaction(cx).is_some() {
2689 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2690 if !push_to_history {
2691 buffer.forget_transaction(transaction.id);
2692 }
2693 Some(transaction)
2694 } else {
2695 None
2696 };
2697
2698 transaction
2699 });
2700 if let Some(transaction) = transaction {
2701 project_transaction.0.insert(buffer_to_edit, transaction);
2702 }
2703 }
2704 }
2705 }
2706
2707 Ok(project_transaction)
2708 }
2709
2710 pub fn prepare_rename<T: ToPointUtf16>(
2711 &self,
2712 buffer: ModelHandle<Buffer>,
2713 position: T,
2714 cx: &mut ModelContext<Self>,
2715 ) -> Task<Result<Option<Range<Anchor>>>> {
2716 let position = position.to_point_utf16(buffer.read(cx));
2717 self.request_lsp(buffer, PrepareRename { position }, cx)
2718 }
2719
2720 pub fn perform_rename<T: ToPointUtf16>(
2721 &self,
2722 buffer: ModelHandle<Buffer>,
2723 position: T,
2724 new_name: String,
2725 push_to_history: bool,
2726 cx: &mut ModelContext<Self>,
2727 ) -> Task<Result<ProjectTransaction>> {
2728 let position = position.to_point_utf16(buffer.read(cx));
2729 self.request_lsp(
2730 buffer,
2731 PerformRename {
2732 position,
2733 new_name,
2734 push_to_history,
2735 },
2736 cx,
2737 )
2738 }
2739
2740 pub fn search(
2741 &self,
2742 query: SearchQuery,
2743 cx: &mut ModelContext<Self>,
2744 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2745 if self.is_local() {
2746 let snapshots = self
2747 .visible_worktrees(cx)
2748 .filter_map(|tree| {
2749 let tree = tree.read(cx).as_local()?;
2750 Some(tree.snapshot())
2751 })
2752 .collect::<Vec<_>>();
2753
2754 let background = cx.background().clone();
2755 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2756 if path_count == 0 {
2757 return Task::ready(Ok(Default::default()));
2758 }
2759 let workers = background.num_cpus().min(path_count);
2760 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2761 cx.background()
2762 .spawn({
2763 let fs = self.fs.clone();
2764 let background = cx.background().clone();
2765 let query = query.clone();
2766 async move {
2767 let fs = &fs;
2768 let query = &query;
2769 let matching_paths_tx = &matching_paths_tx;
2770 let paths_per_worker = (path_count + workers - 1) / workers;
2771 let snapshots = &snapshots;
2772 background
2773 .scoped(|scope| {
2774 for worker_ix in 0..workers {
2775 let worker_start_ix = worker_ix * paths_per_worker;
2776 let worker_end_ix = worker_start_ix + paths_per_worker;
2777 scope.spawn(async move {
2778 let mut snapshot_start_ix = 0;
2779 let mut abs_path = PathBuf::new();
2780 for snapshot in snapshots {
2781 let snapshot_end_ix =
2782 snapshot_start_ix + snapshot.visible_file_count();
2783 if worker_end_ix <= snapshot_start_ix {
2784 break;
2785 } else if worker_start_ix > snapshot_end_ix {
2786 snapshot_start_ix = snapshot_end_ix;
2787 continue;
2788 } else {
2789 let start_in_snapshot = worker_start_ix
2790 .saturating_sub(snapshot_start_ix);
2791 let end_in_snapshot =
2792 cmp::min(worker_end_ix, snapshot_end_ix)
2793 - snapshot_start_ix;
2794
2795 for entry in snapshot
2796 .files(false, start_in_snapshot)
2797 .take(end_in_snapshot - start_in_snapshot)
2798 {
2799 if matching_paths_tx.is_closed() {
2800 break;
2801 }
2802
2803 abs_path.clear();
2804 abs_path.push(&snapshot.abs_path());
2805 abs_path.push(&entry.path);
2806 let matches = if let Some(file) =
2807 fs.open_sync(&abs_path).await.log_err()
2808 {
2809 query.detect(file).unwrap_or(false)
2810 } else {
2811 false
2812 };
2813
2814 if matches {
2815 let project_path =
2816 (snapshot.id(), entry.path.clone());
2817 if matching_paths_tx
2818 .send(project_path)
2819 .await
2820 .is_err()
2821 {
2822 break;
2823 }
2824 }
2825 }
2826
2827 snapshot_start_ix = snapshot_end_ix;
2828 }
2829 }
2830 });
2831 }
2832 })
2833 .await;
2834 }
2835 })
2836 .detach();
2837
2838 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2839 let open_buffers = self
2840 .opened_buffers
2841 .values()
2842 .filter_map(|b| b.upgrade(cx))
2843 .collect::<HashSet<_>>();
2844 cx.spawn(|this, cx| async move {
2845 for buffer in &open_buffers {
2846 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2847 buffers_tx.send((buffer.clone(), snapshot)).await?;
2848 }
2849
2850 let open_buffers = Rc::new(RefCell::new(open_buffers));
2851 while let Some(project_path) = matching_paths_rx.next().await {
2852 if buffers_tx.is_closed() {
2853 break;
2854 }
2855
2856 let this = this.clone();
2857 let open_buffers = open_buffers.clone();
2858 let buffers_tx = buffers_tx.clone();
2859 cx.spawn(|mut cx| async move {
2860 if let Some(buffer) = this
2861 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2862 .await
2863 .log_err()
2864 {
2865 if open_buffers.borrow_mut().insert(buffer.clone()) {
2866 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2867 buffers_tx.send((buffer, snapshot)).await?;
2868 }
2869 }
2870
2871 Ok::<_, anyhow::Error>(())
2872 })
2873 .detach();
2874 }
2875
2876 Ok::<_, anyhow::Error>(())
2877 })
2878 .detach_and_log_err(cx);
2879
2880 let background = cx.background().clone();
2881 cx.background().spawn(async move {
2882 let query = &query;
2883 let mut matched_buffers = Vec::new();
2884 for _ in 0..workers {
2885 matched_buffers.push(HashMap::default());
2886 }
2887 background
2888 .scoped(|scope| {
2889 for worker_matched_buffers in matched_buffers.iter_mut() {
2890 let mut buffers_rx = buffers_rx.clone();
2891 scope.spawn(async move {
2892 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2893 let buffer_matches = query
2894 .search(snapshot.as_rope())
2895 .await
2896 .iter()
2897 .map(|range| {
2898 snapshot.anchor_before(range.start)
2899 ..snapshot.anchor_after(range.end)
2900 })
2901 .collect::<Vec<_>>();
2902 if !buffer_matches.is_empty() {
2903 worker_matched_buffers
2904 .insert(buffer.clone(), buffer_matches);
2905 }
2906 }
2907 });
2908 }
2909 })
2910 .await;
2911 Ok(matched_buffers.into_iter().flatten().collect())
2912 })
2913 } else if let Some(project_id) = self.remote_id() {
2914 let request = self.client.request(query.to_proto(project_id));
2915 cx.spawn(|this, mut cx| async move {
2916 let response = request.await?;
2917 let mut result = HashMap::default();
2918 for location in response.locations {
2919 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2920 let target_buffer = this
2921 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2922 .await?;
2923 let start = location
2924 .start
2925 .and_then(deserialize_anchor)
2926 .ok_or_else(|| anyhow!("missing target start"))?;
2927 let end = location
2928 .end
2929 .and_then(deserialize_anchor)
2930 .ok_or_else(|| anyhow!("missing target end"))?;
2931 result
2932 .entry(target_buffer)
2933 .or_insert(Vec::new())
2934 .push(start..end)
2935 }
2936 Ok(result)
2937 })
2938 } else {
2939 Task::ready(Ok(Default::default()))
2940 }
2941 }
2942
2943 fn request_lsp<R: LspCommand>(
2944 &self,
2945 buffer_handle: ModelHandle<Buffer>,
2946 request: R,
2947 cx: &mut ModelContext<Self>,
2948 ) -> Task<Result<R::Response>>
2949 where
2950 <R::LspRequest as lsp::request::Request>::Result: Send,
2951 {
2952 let buffer = buffer_handle.read(cx);
2953 if self.is_local() {
2954 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2955 if let Some((file, language_server)) =
2956 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2957 {
2958 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2959 return cx.spawn(|this, cx| async move {
2960 if !request.check_capabilities(&language_server.capabilities()) {
2961 return Ok(Default::default());
2962 }
2963
2964 let response = language_server
2965 .request::<R::LspRequest>(lsp_params)
2966 .await
2967 .context("lsp request failed")?;
2968 request
2969 .response_from_lsp(response, this, buffer_handle, cx)
2970 .await
2971 });
2972 }
2973 } else if let Some(project_id) = self.remote_id() {
2974 let rpc = self.client.clone();
2975 let message = request.to_proto(project_id, buffer);
2976 return cx.spawn(|this, cx| async move {
2977 let response = rpc.request(message).await?;
2978 request
2979 .response_from_proto(response, this, buffer_handle, cx)
2980 .await
2981 });
2982 }
2983 Task::ready(Ok(Default::default()))
2984 }
2985
2986 pub fn find_or_create_local_worktree(
2987 &mut self,
2988 abs_path: impl AsRef<Path>,
2989 visible: bool,
2990 cx: &mut ModelContext<Self>,
2991 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2992 let abs_path = abs_path.as_ref();
2993 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2994 Task::ready(Ok((tree.clone(), relative_path.into())))
2995 } else {
2996 let worktree = self.create_local_worktree(abs_path, visible, cx);
2997 cx.foreground()
2998 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2999 }
3000 }
3001
3002 pub fn find_local_worktree(
3003 &self,
3004 abs_path: &Path,
3005 cx: &AppContext,
3006 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3007 for tree in self.worktrees(cx) {
3008 if let Some(relative_path) = tree
3009 .read(cx)
3010 .as_local()
3011 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3012 {
3013 return Some((tree.clone(), relative_path.into()));
3014 }
3015 }
3016 None
3017 }
3018
3019 pub fn is_shared(&self) -> bool {
3020 match &self.client_state {
3021 ProjectClientState::Local { is_shared, .. } => *is_shared,
3022 ProjectClientState::Remote { .. } => false,
3023 }
3024 }
3025
3026 fn create_local_worktree(
3027 &mut self,
3028 abs_path: impl AsRef<Path>,
3029 visible: bool,
3030 cx: &mut ModelContext<Self>,
3031 ) -> Task<Result<ModelHandle<Worktree>>> {
3032 let fs = self.fs.clone();
3033 let client = self.client.clone();
3034 let next_entry_id = self.next_entry_id.clone();
3035 let path: Arc<Path> = abs_path.as_ref().into();
3036 let task = self
3037 .loading_local_worktrees
3038 .entry(path.clone())
3039 .or_insert_with(|| {
3040 cx.spawn(|project, mut cx| {
3041 async move {
3042 let worktree = Worktree::local(
3043 client.clone(),
3044 path.clone(),
3045 visible,
3046 fs,
3047 next_entry_id,
3048 &mut cx,
3049 )
3050 .await;
3051 project.update(&mut cx, |project, _| {
3052 project.loading_local_worktrees.remove(&path);
3053 });
3054 let worktree = worktree?;
3055
3056 let (remote_project_id, is_shared) =
3057 project.update(&mut cx, |project, cx| {
3058 project.add_worktree(&worktree, cx);
3059 (project.remote_id(), project.is_shared())
3060 });
3061
3062 if let Some(project_id) = remote_project_id {
3063 if is_shared {
3064 worktree
3065 .update(&mut cx, |worktree, cx| {
3066 worktree.as_local_mut().unwrap().share(project_id, cx)
3067 })
3068 .await?;
3069 } else {
3070 worktree
3071 .update(&mut cx, |worktree, cx| {
3072 worktree.as_local_mut().unwrap().register(project_id, cx)
3073 })
3074 .await?;
3075 }
3076 }
3077
3078 Ok(worktree)
3079 }
3080 .map_err(|err| Arc::new(err))
3081 })
3082 .shared()
3083 })
3084 .clone();
3085 cx.foreground().spawn(async move {
3086 match task.await {
3087 Ok(worktree) => Ok(worktree),
3088 Err(err) => Err(anyhow!("{}", err)),
3089 }
3090 })
3091 }
3092
3093 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3094 self.worktrees.retain(|worktree| {
3095 worktree
3096 .upgrade(cx)
3097 .map_or(false, |w| w.read(cx).id() != id)
3098 });
3099 cx.notify();
3100 }
3101
3102 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3103 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3104 if worktree.read(cx).is_local() {
3105 cx.subscribe(&worktree, |this, worktree, _, cx| {
3106 this.update_local_worktree_buffers(worktree, cx);
3107 })
3108 .detach();
3109 }
3110
3111 let push_strong_handle = {
3112 let worktree = worktree.read(cx);
3113 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3114 };
3115 if push_strong_handle {
3116 self.worktrees
3117 .push(WorktreeHandle::Strong(worktree.clone()));
3118 } else {
3119 cx.observe_release(&worktree, |this, _, cx| {
3120 this.worktrees
3121 .retain(|worktree| worktree.upgrade(cx).is_some());
3122 cx.notify();
3123 })
3124 .detach();
3125 self.worktrees
3126 .push(WorktreeHandle::Weak(worktree.downgrade()));
3127 }
3128 cx.notify();
3129 }
3130
3131 fn update_local_worktree_buffers(
3132 &mut self,
3133 worktree_handle: ModelHandle<Worktree>,
3134 cx: &mut ModelContext<Self>,
3135 ) {
3136 let snapshot = worktree_handle.read(cx).snapshot();
3137 let mut buffers_to_delete = Vec::new();
3138 for (buffer_id, buffer) in &self.opened_buffers {
3139 if let Some(buffer) = buffer.upgrade(cx) {
3140 buffer.update(cx, |buffer, cx| {
3141 if let Some(old_file) = File::from_dyn(buffer.file()) {
3142 if old_file.worktree != worktree_handle {
3143 return;
3144 }
3145
3146 let new_file = if let Some(entry) = old_file
3147 .entry_id
3148 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3149 {
3150 File {
3151 is_local: true,
3152 entry_id: Some(entry.id),
3153 mtime: entry.mtime,
3154 path: entry.path.clone(),
3155 worktree: worktree_handle.clone(),
3156 }
3157 } else if let Some(entry) =
3158 snapshot.entry_for_path(old_file.path().as_ref())
3159 {
3160 File {
3161 is_local: true,
3162 entry_id: Some(entry.id),
3163 mtime: entry.mtime,
3164 path: entry.path.clone(),
3165 worktree: worktree_handle.clone(),
3166 }
3167 } else {
3168 File {
3169 is_local: true,
3170 entry_id: None,
3171 path: old_file.path().clone(),
3172 mtime: old_file.mtime(),
3173 worktree: worktree_handle.clone(),
3174 }
3175 };
3176
3177 if let Some(project_id) = self.remote_id() {
3178 self.client
3179 .send(proto::UpdateBufferFile {
3180 project_id,
3181 buffer_id: *buffer_id as u64,
3182 file: Some(new_file.to_proto()),
3183 })
3184 .log_err();
3185 }
3186 buffer.file_updated(Box::new(new_file), cx).detach();
3187 }
3188 });
3189 } else {
3190 buffers_to_delete.push(*buffer_id);
3191 }
3192 }
3193
3194 for buffer_id in buffers_to_delete {
3195 self.opened_buffers.remove(&buffer_id);
3196 }
3197 }
3198
3199 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3200 let new_active_entry = entry.and_then(|project_path| {
3201 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3202 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3203 Some(entry.id)
3204 });
3205 if new_active_entry != self.active_entry {
3206 self.active_entry = new_active_entry;
3207 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3208 }
3209 }
3210
3211 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3212 self.language_servers_with_diagnostics_running > 0
3213 }
3214
3215 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3216 let mut summary = DiagnosticSummary::default();
3217 for (_, path_summary) in self.diagnostic_summaries(cx) {
3218 summary.error_count += path_summary.error_count;
3219 summary.warning_count += path_summary.warning_count;
3220 summary.info_count += path_summary.info_count;
3221 summary.hint_count += path_summary.hint_count;
3222 }
3223 summary
3224 }
3225
3226 pub fn diagnostic_summaries<'a>(
3227 &'a self,
3228 cx: &'a AppContext,
3229 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3230 self.worktrees(cx).flat_map(move |worktree| {
3231 let worktree = worktree.read(cx);
3232 let worktree_id = worktree.id();
3233 worktree
3234 .diagnostic_summaries()
3235 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3236 })
3237 }
3238
3239 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3240 self.language_servers_with_diagnostics_running += 1;
3241 if self.language_servers_with_diagnostics_running == 1 {
3242 cx.emit(Event::DiskBasedDiagnosticsStarted);
3243 }
3244 }
3245
3246 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3247 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3248 self.language_servers_with_diagnostics_running -= 1;
3249 if self.language_servers_with_diagnostics_running == 0 {
3250 cx.emit(Event::DiskBasedDiagnosticsFinished);
3251 }
3252 }
3253
3254 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3255 self.active_entry
3256 }
3257
3258 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3259 self.worktree_for_id(path.worktree_id, cx)?
3260 .read(cx)
3261 .entry_for_path(&path.path)
3262 .map(|entry| entry.id)
3263 }
3264
3265 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3266 let worktree = self.worktree_for_entry(entry_id, cx)?;
3267 let worktree = worktree.read(cx);
3268 let worktree_id = worktree.id();
3269 let path = worktree.entry_for_id(entry_id)?.path.clone();
3270 Some(ProjectPath { worktree_id, path })
3271 }
3272
3273 // RPC message handlers
3274
3275 async fn handle_unshare_project(
3276 this: ModelHandle<Self>,
3277 _: TypedEnvelope<proto::UnshareProject>,
3278 _: Arc<Client>,
3279 mut cx: AsyncAppContext,
3280 ) -> Result<()> {
3281 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3282 Ok(())
3283 }
3284
3285 async fn handle_add_collaborator(
3286 this: ModelHandle<Self>,
3287 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3288 _: Arc<Client>,
3289 mut cx: AsyncAppContext,
3290 ) -> Result<()> {
3291 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3292 let collaborator = envelope
3293 .payload
3294 .collaborator
3295 .take()
3296 .ok_or_else(|| anyhow!("empty collaborator"))?;
3297
3298 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3299 this.update(&mut cx, |this, cx| {
3300 this.collaborators
3301 .insert(collaborator.peer_id, collaborator);
3302 cx.notify();
3303 });
3304
3305 Ok(())
3306 }
3307
3308 async fn handle_remove_collaborator(
3309 this: ModelHandle<Self>,
3310 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3311 _: Arc<Client>,
3312 mut cx: AsyncAppContext,
3313 ) -> Result<()> {
3314 this.update(&mut cx, |this, cx| {
3315 let peer_id = PeerId(envelope.payload.peer_id);
3316 let replica_id = this
3317 .collaborators
3318 .remove(&peer_id)
3319 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3320 .replica_id;
3321 for (_, buffer) in &this.opened_buffers {
3322 if let Some(buffer) = buffer.upgrade(cx) {
3323 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3324 }
3325 }
3326 cx.notify();
3327 Ok(())
3328 })
3329 }
3330
3331 async fn handle_register_worktree(
3332 this: ModelHandle<Self>,
3333 envelope: TypedEnvelope<proto::RegisterWorktree>,
3334 client: Arc<Client>,
3335 mut cx: AsyncAppContext,
3336 ) -> Result<()> {
3337 this.update(&mut cx, |this, cx| {
3338 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3339 let replica_id = this.replica_id();
3340 let worktree = proto::Worktree {
3341 id: envelope.payload.worktree_id,
3342 root_name: envelope.payload.root_name,
3343 entries: Default::default(),
3344 diagnostic_summaries: Default::default(),
3345 visible: envelope.payload.visible,
3346 };
3347 let (worktree, load_task) =
3348 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3349 this.add_worktree(&worktree, cx);
3350 load_task.detach();
3351 Ok(())
3352 })
3353 }
3354
3355 async fn handle_unregister_worktree(
3356 this: ModelHandle<Self>,
3357 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3358 _: Arc<Client>,
3359 mut cx: AsyncAppContext,
3360 ) -> Result<()> {
3361 this.update(&mut cx, |this, cx| {
3362 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3363 this.remove_worktree(worktree_id, cx);
3364 Ok(())
3365 })
3366 }
3367
3368 async fn handle_update_worktree(
3369 this: ModelHandle<Self>,
3370 envelope: TypedEnvelope<proto::UpdateWorktree>,
3371 _: Arc<Client>,
3372 mut cx: AsyncAppContext,
3373 ) -> Result<()> {
3374 this.update(&mut cx, |this, cx| {
3375 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3376 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3377 worktree.update(cx, |worktree, _| {
3378 let worktree = worktree.as_remote_mut().unwrap();
3379 worktree.update_from_remote(envelope)
3380 })?;
3381 }
3382 Ok(())
3383 })
3384 }
3385
3386 async fn handle_update_diagnostic_summary(
3387 this: ModelHandle<Self>,
3388 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3389 _: Arc<Client>,
3390 mut cx: AsyncAppContext,
3391 ) -> Result<()> {
3392 this.update(&mut cx, |this, cx| {
3393 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3394 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3395 if let Some(summary) = envelope.payload.summary {
3396 let project_path = ProjectPath {
3397 worktree_id,
3398 path: Path::new(&summary.path).into(),
3399 };
3400 worktree.update(cx, |worktree, _| {
3401 worktree
3402 .as_remote_mut()
3403 .unwrap()
3404 .update_diagnostic_summary(project_path.path.clone(), &summary);
3405 });
3406 cx.emit(Event::DiagnosticsUpdated(project_path));
3407 }
3408 }
3409 Ok(())
3410 })
3411 }
3412
3413 async fn handle_start_language_server(
3414 this: ModelHandle<Self>,
3415 envelope: TypedEnvelope<proto::StartLanguageServer>,
3416 _: Arc<Client>,
3417 mut cx: AsyncAppContext,
3418 ) -> Result<()> {
3419 let server = envelope
3420 .payload
3421 .server
3422 .ok_or_else(|| anyhow!("invalid server"))?;
3423 this.update(&mut cx, |this, cx| {
3424 this.language_server_statuses.insert(
3425 server.id as usize,
3426 LanguageServerStatus {
3427 name: server.name,
3428 pending_work: Default::default(),
3429 pending_diagnostic_updates: 0,
3430 },
3431 );
3432 cx.notify();
3433 });
3434 Ok(())
3435 }
3436
3437 async fn handle_update_language_server(
3438 this: ModelHandle<Self>,
3439 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3440 _: Arc<Client>,
3441 mut cx: AsyncAppContext,
3442 ) -> Result<()> {
3443 let language_server_id = envelope.payload.language_server_id as usize;
3444 match envelope
3445 .payload
3446 .variant
3447 .ok_or_else(|| anyhow!("invalid variant"))?
3448 {
3449 proto::update_language_server::Variant::WorkStart(payload) => {
3450 this.update(&mut cx, |this, cx| {
3451 this.on_lsp_work_start(language_server_id, payload.token, cx);
3452 })
3453 }
3454 proto::update_language_server::Variant::WorkProgress(payload) => {
3455 this.update(&mut cx, |this, cx| {
3456 this.on_lsp_work_progress(
3457 language_server_id,
3458 payload.token,
3459 LanguageServerProgress {
3460 message: payload.message,
3461 percentage: payload.percentage.map(|p| p as usize),
3462 last_update_at: Instant::now(),
3463 },
3464 cx,
3465 );
3466 })
3467 }
3468 proto::update_language_server::Variant::WorkEnd(payload) => {
3469 this.update(&mut cx, |this, cx| {
3470 this.on_lsp_work_end(language_server_id, payload.token, cx);
3471 })
3472 }
3473 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3474 this.update(&mut cx, |this, cx| {
3475 this.disk_based_diagnostics_started(cx);
3476 })
3477 }
3478 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3479 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3480 }
3481 }
3482
3483 Ok(())
3484 }
3485
3486 async fn handle_update_buffer(
3487 this: ModelHandle<Self>,
3488 envelope: TypedEnvelope<proto::UpdateBuffer>,
3489 _: Arc<Client>,
3490 mut cx: AsyncAppContext,
3491 ) -> Result<()> {
3492 this.update(&mut cx, |this, cx| {
3493 let payload = envelope.payload.clone();
3494 let buffer_id = payload.buffer_id;
3495 let ops = payload
3496 .operations
3497 .into_iter()
3498 .map(|op| language::proto::deserialize_operation(op))
3499 .collect::<Result<Vec<_>, _>>()?;
3500 match this.opened_buffers.entry(buffer_id) {
3501 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3502 OpenBuffer::Strong(buffer) => {
3503 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3504 }
3505 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3506 OpenBuffer::Weak(_) => {}
3507 },
3508 hash_map::Entry::Vacant(e) => {
3509 e.insert(OpenBuffer::Loading(ops));
3510 }
3511 }
3512 Ok(())
3513 })
3514 }
3515
3516 async fn handle_update_buffer_file(
3517 this: ModelHandle<Self>,
3518 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3519 _: Arc<Client>,
3520 mut cx: AsyncAppContext,
3521 ) -> Result<()> {
3522 this.update(&mut cx, |this, cx| {
3523 let payload = envelope.payload.clone();
3524 let buffer_id = payload.buffer_id;
3525 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3526 let worktree = this
3527 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3528 .ok_or_else(|| anyhow!("no such worktree"))?;
3529 let file = File::from_proto(file, worktree.clone(), cx)?;
3530 let buffer = this
3531 .opened_buffers
3532 .get_mut(&buffer_id)
3533 .and_then(|b| b.upgrade(cx))
3534 .ok_or_else(|| anyhow!("no such buffer"))?;
3535 buffer.update(cx, |buffer, cx| {
3536 buffer.file_updated(Box::new(file), cx).detach();
3537 });
3538 Ok(())
3539 })
3540 }
3541
3542 async fn handle_save_buffer(
3543 this: ModelHandle<Self>,
3544 envelope: TypedEnvelope<proto::SaveBuffer>,
3545 _: Arc<Client>,
3546 mut cx: AsyncAppContext,
3547 ) -> Result<proto::BufferSaved> {
3548 let buffer_id = envelope.payload.buffer_id;
3549 let requested_version = deserialize_version(envelope.payload.version);
3550
3551 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3552 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3553 let buffer = this
3554 .opened_buffers
3555 .get(&buffer_id)
3556 .map(|buffer| buffer.upgrade(cx).unwrap())
3557 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3558 Ok::<_, anyhow::Error>((project_id, buffer))
3559 })?;
3560 buffer
3561 .update(&mut cx, |buffer, _| {
3562 buffer.wait_for_version(requested_version)
3563 })
3564 .await;
3565
3566 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3567 Ok(proto::BufferSaved {
3568 project_id,
3569 buffer_id,
3570 version: serialize_version(&saved_version),
3571 mtime: Some(mtime.into()),
3572 })
3573 }
3574
3575 async fn handle_format_buffers(
3576 this: ModelHandle<Self>,
3577 envelope: TypedEnvelope<proto::FormatBuffers>,
3578 _: Arc<Client>,
3579 mut cx: AsyncAppContext,
3580 ) -> Result<proto::FormatBuffersResponse> {
3581 let sender_id = envelope.original_sender_id()?;
3582 let format = this.update(&mut cx, |this, cx| {
3583 let mut buffers = HashSet::default();
3584 for buffer_id in &envelope.payload.buffer_ids {
3585 buffers.insert(
3586 this.opened_buffers
3587 .get(buffer_id)
3588 .map(|buffer| buffer.upgrade(cx).unwrap())
3589 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3590 );
3591 }
3592 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3593 })?;
3594
3595 let project_transaction = format.await?;
3596 let project_transaction = this.update(&mut cx, |this, cx| {
3597 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3598 });
3599 Ok(proto::FormatBuffersResponse {
3600 transaction: Some(project_transaction),
3601 })
3602 }
3603
3604 async fn handle_get_completions(
3605 this: ModelHandle<Self>,
3606 envelope: TypedEnvelope<proto::GetCompletions>,
3607 _: Arc<Client>,
3608 mut cx: AsyncAppContext,
3609 ) -> Result<proto::GetCompletionsResponse> {
3610 let position = envelope
3611 .payload
3612 .position
3613 .and_then(language::proto::deserialize_anchor)
3614 .ok_or_else(|| anyhow!("invalid position"))?;
3615 let version = deserialize_version(envelope.payload.version);
3616 let buffer = this.read_with(&cx, |this, cx| {
3617 this.opened_buffers
3618 .get(&envelope.payload.buffer_id)
3619 .map(|buffer| buffer.upgrade(cx).unwrap())
3620 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3621 })?;
3622 buffer
3623 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3624 .await;
3625 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3626 let completions = this
3627 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3628 .await?;
3629
3630 Ok(proto::GetCompletionsResponse {
3631 completions: completions
3632 .iter()
3633 .map(language::proto::serialize_completion)
3634 .collect(),
3635 version: serialize_version(&version),
3636 })
3637 }
3638
3639 async fn handle_apply_additional_edits_for_completion(
3640 this: ModelHandle<Self>,
3641 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3642 _: Arc<Client>,
3643 mut cx: AsyncAppContext,
3644 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3645 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3646 let buffer = this
3647 .opened_buffers
3648 .get(&envelope.payload.buffer_id)
3649 .map(|buffer| buffer.upgrade(cx).unwrap())
3650 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3651 let language = buffer.read(cx).language();
3652 let completion = language::proto::deserialize_completion(
3653 envelope
3654 .payload
3655 .completion
3656 .ok_or_else(|| anyhow!("invalid completion"))?,
3657 language,
3658 )?;
3659 Ok::<_, anyhow::Error>(
3660 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3661 )
3662 })?;
3663
3664 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3665 transaction: apply_additional_edits
3666 .await?
3667 .as_ref()
3668 .map(language::proto::serialize_transaction),
3669 })
3670 }
3671
3672 async fn handle_get_code_actions(
3673 this: ModelHandle<Self>,
3674 envelope: TypedEnvelope<proto::GetCodeActions>,
3675 _: Arc<Client>,
3676 mut cx: AsyncAppContext,
3677 ) -> Result<proto::GetCodeActionsResponse> {
3678 let start = envelope
3679 .payload
3680 .start
3681 .and_then(language::proto::deserialize_anchor)
3682 .ok_or_else(|| anyhow!("invalid start"))?;
3683 let end = envelope
3684 .payload
3685 .end
3686 .and_then(language::proto::deserialize_anchor)
3687 .ok_or_else(|| anyhow!("invalid end"))?;
3688 let buffer = this.update(&mut cx, |this, cx| {
3689 this.opened_buffers
3690 .get(&envelope.payload.buffer_id)
3691 .map(|buffer| buffer.upgrade(cx).unwrap())
3692 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3693 })?;
3694 buffer
3695 .update(&mut cx, |buffer, _| {
3696 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3697 })
3698 .await;
3699
3700 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3701 let code_actions = this.update(&mut cx, |this, cx| {
3702 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3703 })?;
3704
3705 Ok(proto::GetCodeActionsResponse {
3706 actions: code_actions
3707 .await?
3708 .iter()
3709 .map(language::proto::serialize_code_action)
3710 .collect(),
3711 version: serialize_version(&version),
3712 })
3713 }
3714
3715 async fn handle_apply_code_action(
3716 this: ModelHandle<Self>,
3717 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3718 _: Arc<Client>,
3719 mut cx: AsyncAppContext,
3720 ) -> Result<proto::ApplyCodeActionResponse> {
3721 let sender_id = envelope.original_sender_id()?;
3722 let action = language::proto::deserialize_code_action(
3723 envelope
3724 .payload
3725 .action
3726 .ok_or_else(|| anyhow!("invalid action"))?,
3727 )?;
3728 let apply_code_action = this.update(&mut cx, |this, cx| {
3729 let buffer = this
3730 .opened_buffers
3731 .get(&envelope.payload.buffer_id)
3732 .map(|buffer| buffer.upgrade(cx).unwrap())
3733 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3734 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3735 })?;
3736
3737 let project_transaction = apply_code_action.await?;
3738 let project_transaction = this.update(&mut cx, |this, cx| {
3739 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3740 });
3741 Ok(proto::ApplyCodeActionResponse {
3742 transaction: Some(project_transaction),
3743 })
3744 }
3745
3746 async fn handle_lsp_command<T: LspCommand>(
3747 this: ModelHandle<Self>,
3748 envelope: TypedEnvelope<T::ProtoRequest>,
3749 _: Arc<Client>,
3750 mut cx: AsyncAppContext,
3751 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3752 where
3753 <T::LspRequest as lsp::request::Request>::Result: Send,
3754 {
3755 let sender_id = envelope.original_sender_id()?;
3756 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3757 let buffer_handle = this.read_with(&cx, |this, _| {
3758 this.opened_buffers
3759 .get(&buffer_id)
3760 .and_then(|buffer| buffer.upgrade(&cx))
3761 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3762 })?;
3763 let request = T::from_proto(
3764 envelope.payload,
3765 this.clone(),
3766 buffer_handle.clone(),
3767 cx.clone(),
3768 )
3769 .await?;
3770 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3771 let response = this
3772 .update(&mut cx, |this, cx| {
3773 this.request_lsp(buffer_handle, request, cx)
3774 })
3775 .await?;
3776 this.update(&mut cx, |this, cx| {
3777 Ok(T::response_to_proto(
3778 response,
3779 this,
3780 sender_id,
3781 &buffer_version,
3782 cx,
3783 ))
3784 })
3785 }
3786
3787 async fn handle_get_project_symbols(
3788 this: ModelHandle<Self>,
3789 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3790 _: Arc<Client>,
3791 mut cx: AsyncAppContext,
3792 ) -> Result<proto::GetProjectSymbolsResponse> {
3793 let symbols = this
3794 .update(&mut cx, |this, cx| {
3795 this.symbols(&envelope.payload.query, cx)
3796 })
3797 .await?;
3798
3799 Ok(proto::GetProjectSymbolsResponse {
3800 symbols: symbols.iter().map(serialize_symbol).collect(),
3801 })
3802 }
3803
3804 async fn handle_search_project(
3805 this: ModelHandle<Self>,
3806 envelope: TypedEnvelope<proto::SearchProject>,
3807 _: Arc<Client>,
3808 mut cx: AsyncAppContext,
3809 ) -> Result<proto::SearchProjectResponse> {
3810 let peer_id = envelope.original_sender_id()?;
3811 let query = SearchQuery::from_proto(envelope.payload)?;
3812 let result = this
3813 .update(&mut cx, |this, cx| this.search(query, cx))
3814 .await?;
3815
3816 this.update(&mut cx, |this, cx| {
3817 let mut locations = Vec::new();
3818 for (buffer, ranges) in result {
3819 for range in ranges {
3820 let start = serialize_anchor(&range.start);
3821 let end = serialize_anchor(&range.end);
3822 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3823 locations.push(proto::Location {
3824 buffer: Some(buffer),
3825 start: Some(start),
3826 end: Some(end),
3827 });
3828 }
3829 }
3830 Ok(proto::SearchProjectResponse { locations })
3831 })
3832 }
3833
3834 async fn handle_open_buffer_for_symbol(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::OpenBufferForSymbolResponse> {
3840 let peer_id = envelope.original_sender_id()?;
3841 let symbol = envelope
3842 .payload
3843 .symbol
3844 .ok_or_else(|| anyhow!("invalid symbol"))?;
3845 let symbol = this.read_with(&cx, |this, _| {
3846 let symbol = this.deserialize_symbol(symbol)?;
3847 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3848 if signature == symbol.signature {
3849 Ok(symbol)
3850 } else {
3851 Err(anyhow!("invalid symbol signature"))
3852 }
3853 })?;
3854 let buffer = this
3855 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3856 .await?;
3857
3858 Ok(proto::OpenBufferForSymbolResponse {
3859 buffer: Some(this.update(&mut cx, |this, cx| {
3860 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3861 })),
3862 })
3863 }
3864
3865 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3866 let mut hasher = Sha256::new();
3867 hasher.update(worktree_id.to_proto().to_be_bytes());
3868 hasher.update(path.to_string_lossy().as_bytes());
3869 hasher.update(self.nonce.to_be_bytes());
3870 hasher.finalize().as_slice().try_into().unwrap()
3871 }
3872
3873 async fn handle_open_buffer(
3874 this: ModelHandle<Self>,
3875 envelope: TypedEnvelope<proto::OpenBuffer>,
3876 _: Arc<Client>,
3877 mut cx: AsyncAppContext,
3878 ) -> Result<proto::OpenBufferResponse> {
3879 let peer_id = envelope.original_sender_id()?;
3880 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3881 let open_buffer = this.update(&mut cx, |this, cx| {
3882 this.open_buffer(
3883 ProjectPath {
3884 worktree_id,
3885 path: PathBuf::from(envelope.payload.path).into(),
3886 },
3887 cx,
3888 )
3889 });
3890
3891 let buffer = open_buffer.await?;
3892 this.update(&mut cx, |this, cx| {
3893 Ok(proto::OpenBufferResponse {
3894 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3895 })
3896 })
3897 }
3898
3899 fn serialize_project_transaction_for_peer(
3900 &mut self,
3901 project_transaction: ProjectTransaction,
3902 peer_id: PeerId,
3903 cx: &AppContext,
3904 ) -> proto::ProjectTransaction {
3905 let mut serialized_transaction = proto::ProjectTransaction {
3906 buffers: Default::default(),
3907 transactions: Default::default(),
3908 };
3909 for (buffer, transaction) in project_transaction.0 {
3910 serialized_transaction
3911 .buffers
3912 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3913 serialized_transaction
3914 .transactions
3915 .push(language::proto::serialize_transaction(&transaction));
3916 }
3917 serialized_transaction
3918 }
3919
3920 fn deserialize_project_transaction(
3921 &mut self,
3922 message: proto::ProjectTransaction,
3923 push_to_history: bool,
3924 cx: &mut ModelContext<Self>,
3925 ) -> Task<Result<ProjectTransaction>> {
3926 cx.spawn(|this, mut cx| async move {
3927 let mut project_transaction = ProjectTransaction::default();
3928 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3929 let buffer = this
3930 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3931 .await?;
3932 let transaction = language::proto::deserialize_transaction(transaction)?;
3933 project_transaction.0.insert(buffer, transaction);
3934 }
3935
3936 for (buffer, transaction) in &project_transaction.0 {
3937 buffer
3938 .update(&mut cx, |buffer, _| {
3939 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3940 })
3941 .await;
3942
3943 if push_to_history {
3944 buffer.update(&mut cx, |buffer, _| {
3945 buffer.push_transaction(transaction.clone(), Instant::now());
3946 });
3947 }
3948 }
3949
3950 Ok(project_transaction)
3951 })
3952 }
3953
3954 fn serialize_buffer_for_peer(
3955 &mut self,
3956 buffer: &ModelHandle<Buffer>,
3957 peer_id: PeerId,
3958 cx: &AppContext,
3959 ) -> proto::Buffer {
3960 let buffer_id = buffer.read(cx).remote_id();
3961 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3962 if shared_buffers.insert(buffer_id) {
3963 proto::Buffer {
3964 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3965 }
3966 } else {
3967 proto::Buffer {
3968 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3969 }
3970 }
3971 }
3972
3973 fn deserialize_buffer(
3974 &mut self,
3975 buffer: proto::Buffer,
3976 cx: &mut ModelContext<Self>,
3977 ) -> Task<Result<ModelHandle<Buffer>>> {
3978 let replica_id = self.replica_id();
3979
3980 let opened_buffer_tx = self.opened_buffer.0.clone();
3981 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3982 cx.spawn(|this, mut cx| async move {
3983 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3984 proto::buffer::Variant::Id(id) => {
3985 let buffer = loop {
3986 let buffer = this.read_with(&cx, |this, cx| {
3987 this.opened_buffers
3988 .get(&id)
3989 .and_then(|buffer| buffer.upgrade(cx))
3990 });
3991 if let Some(buffer) = buffer {
3992 break buffer;
3993 }
3994 opened_buffer_rx
3995 .next()
3996 .await
3997 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3998 };
3999 Ok(buffer)
4000 }
4001 proto::buffer::Variant::State(mut buffer) => {
4002 let mut buffer_worktree = None;
4003 let mut buffer_file = None;
4004 if let Some(file) = buffer.file.take() {
4005 this.read_with(&cx, |this, cx| {
4006 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4007 let worktree =
4008 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4009 anyhow!("no worktree found for id {}", file.worktree_id)
4010 })?;
4011 buffer_file =
4012 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4013 as Box<dyn language::File>);
4014 buffer_worktree = Some(worktree);
4015 Ok::<_, anyhow::Error>(())
4016 })?;
4017 }
4018
4019 let buffer = cx.add_model(|cx| {
4020 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4021 });
4022
4023 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4024
4025 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4026 Ok(buffer)
4027 }
4028 }
4029 })
4030 }
4031
4032 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4033 let language = self
4034 .languages
4035 .get_language(&serialized_symbol.language_name);
4036 let start = serialized_symbol
4037 .start
4038 .ok_or_else(|| anyhow!("invalid start"))?;
4039 let end = serialized_symbol
4040 .end
4041 .ok_or_else(|| anyhow!("invalid end"))?;
4042 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4043 Ok(Symbol {
4044 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4045 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4046 language_name: serialized_symbol.language_name.clone(),
4047 label: language
4048 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4049 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4050 name: serialized_symbol.name,
4051 path: PathBuf::from(serialized_symbol.path),
4052 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4053 kind,
4054 signature: serialized_symbol
4055 .signature
4056 .try_into()
4057 .map_err(|_| anyhow!("invalid signature"))?,
4058 })
4059 }
4060
4061 async fn handle_buffer_saved(
4062 this: ModelHandle<Self>,
4063 envelope: TypedEnvelope<proto::BufferSaved>,
4064 _: Arc<Client>,
4065 mut cx: AsyncAppContext,
4066 ) -> Result<()> {
4067 let version = deserialize_version(envelope.payload.version);
4068 let mtime = envelope
4069 .payload
4070 .mtime
4071 .ok_or_else(|| anyhow!("missing mtime"))?
4072 .into();
4073
4074 this.update(&mut cx, |this, cx| {
4075 let buffer = this
4076 .opened_buffers
4077 .get(&envelope.payload.buffer_id)
4078 .and_then(|buffer| buffer.upgrade(cx));
4079 if let Some(buffer) = buffer {
4080 buffer.update(cx, |buffer, cx| {
4081 buffer.did_save(version, mtime, None, cx);
4082 });
4083 }
4084 Ok(())
4085 })
4086 }
4087
4088 async fn handle_buffer_reloaded(
4089 this: ModelHandle<Self>,
4090 envelope: TypedEnvelope<proto::BufferReloaded>,
4091 _: Arc<Client>,
4092 mut cx: AsyncAppContext,
4093 ) -> Result<()> {
4094 let payload = envelope.payload.clone();
4095 let version = deserialize_version(payload.version);
4096 let mtime = payload
4097 .mtime
4098 .ok_or_else(|| anyhow!("missing mtime"))?
4099 .into();
4100 this.update(&mut cx, |this, cx| {
4101 let buffer = this
4102 .opened_buffers
4103 .get(&payload.buffer_id)
4104 .and_then(|buffer| buffer.upgrade(cx));
4105 if let Some(buffer) = buffer {
4106 buffer.update(cx, |buffer, cx| {
4107 buffer.did_reload(version, mtime, cx);
4108 });
4109 }
4110 Ok(())
4111 })
4112 }
4113
4114 pub fn match_paths<'a>(
4115 &self,
4116 query: &'a str,
4117 include_ignored: bool,
4118 smart_case: bool,
4119 max_results: usize,
4120 cancel_flag: &'a AtomicBool,
4121 cx: &AppContext,
4122 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4123 let worktrees = self
4124 .worktrees(cx)
4125 .filter(|worktree| worktree.read(cx).is_visible())
4126 .collect::<Vec<_>>();
4127 let include_root_name = worktrees.len() > 1;
4128 let candidate_sets = worktrees
4129 .into_iter()
4130 .map(|worktree| CandidateSet {
4131 snapshot: worktree.read(cx).snapshot(),
4132 include_ignored,
4133 include_root_name,
4134 })
4135 .collect::<Vec<_>>();
4136
4137 let background = cx.background().clone();
4138 async move {
4139 fuzzy::match_paths(
4140 candidate_sets.as_slice(),
4141 query,
4142 smart_case,
4143 max_results,
4144 cancel_flag,
4145 background,
4146 )
4147 .await
4148 }
4149 }
4150
4151 fn edits_from_lsp(
4152 &mut self,
4153 buffer: &ModelHandle<Buffer>,
4154 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4155 version: Option<i32>,
4156 cx: &mut ModelContext<Self>,
4157 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4158 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4159 cx.background().spawn(async move {
4160 let snapshot = snapshot?;
4161 let mut lsp_edits = lsp_edits
4162 .into_iter()
4163 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4164 .peekable();
4165
4166 let mut edits = Vec::new();
4167 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4168 // Combine any LSP edits that are adjacent.
4169 //
4170 // Also, combine LSP edits that are separated from each other by only
4171 // a newline. This is important because for some code actions,
4172 // Rust-analyzer rewrites the entire buffer via a series of edits that
4173 // are separated by unchanged newline characters.
4174 //
4175 // In order for the diffing logic below to work properly, any edits that
4176 // cancel each other out must be combined into one.
4177 while let Some((next_range, next_text)) = lsp_edits.peek() {
4178 if next_range.start > range.end {
4179 if next_range.start.row > range.end.row + 1
4180 || next_range.start.column > 0
4181 || snapshot.clip_point_utf16(
4182 PointUtf16::new(range.end.row, u32::MAX),
4183 Bias::Left,
4184 ) > range.end
4185 {
4186 break;
4187 }
4188 new_text.push('\n');
4189 }
4190 range.end = next_range.end;
4191 new_text.push_str(&next_text);
4192 lsp_edits.next();
4193 }
4194
4195 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4196 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4197 {
4198 return Err(anyhow!("invalid edits received from language server"));
4199 }
4200
4201 // For multiline edits, perform a diff of the old and new text so that
4202 // we can identify the changes more precisely, preserving the locations
4203 // of any anchors positioned in the unchanged regions.
4204 if range.end.row > range.start.row {
4205 let mut offset = range.start.to_offset(&snapshot);
4206 let old_text = snapshot.text_for_range(range).collect::<String>();
4207
4208 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4209 let mut moved_since_edit = true;
4210 for change in diff.iter_all_changes() {
4211 let tag = change.tag();
4212 let value = change.value();
4213 match tag {
4214 ChangeTag::Equal => {
4215 offset += value.len();
4216 moved_since_edit = true;
4217 }
4218 ChangeTag::Delete => {
4219 let start = snapshot.anchor_after(offset);
4220 let end = snapshot.anchor_before(offset + value.len());
4221 if moved_since_edit {
4222 edits.push((start..end, String::new()));
4223 } else {
4224 edits.last_mut().unwrap().0.end = end;
4225 }
4226 offset += value.len();
4227 moved_since_edit = false;
4228 }
4229 ChangeTag::Insert => {
4230 if moved_since_edit {
4231 let anchor = snapshot.anchor_after(offset);
4232 edits.push((anchor.clone()..anchor, value.to_string()));
4233 } else {
4234 edits.last_mut().unwrap().1.push_str(value);
4235 }
4236 moved_since_edit = false;
4237 }
4238 }
4239 }
4240 } else if range.end == range.start {
4241 let anchor = snapshot.anchor_after(range.start);
4242 edits.push((anchor.clone()..anchor, new_text));
4243 } else {
4244 let edit_start = snapshot.anchor_after(range.start);
4245 let edit_end = snapshot.anchor_before(range.end);
4246 edits.push((edit_start..edit_end, new_text));
4247 }
4248 }
4249
4250 Ok(edits)
4251 })
4252 }
4253
4254 fn buffer_snapshot_for_lsp_version(
4255 &mut self,
4256 buffer: &ModelHandle<Buffer>,
4257 version: Option<i32>,
4258 cx: &AppContext,
4259 ) -> Result<TextBufferSnapshot> {
4260 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4261
4262 if let Some(version) = version {
4263 let buffer_id = buffer.read(cx).remote_id();
4264 let snapshots = self
4265 .buffer_snapshots
4266 .get_mut(&buffer_id)
4267 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4268 let mut found_snapshot = None;
4269 snapshots.retain(|(snapshot_version, snapshot)| {
4270 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4271 false
4272 } else {
4273 if *snapshot_version == version {
4274 found_snapshot = Some(snapshot.clone());
4275 }
4276 true
4277 }
4278 });
4279
4280 found_snapshot.ok_or_else(|| {
4281 anyhow!(
4282 "snapshot not found for buffer {} at version {}",
4283 buffer_id,
4284 version
4285 )
4286 })
4287 } else {
4288 Ok((buffer.read(cx)).text_snapshot())
4289 }
4290 }
4291
4292 fn language_server_for_buffer(
4293 &self,
4294 buffer: &Buffer,
4295 cx: &AppContext,
4296 ) -> Option<&Arc<LanguageServer>> {
4297 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4298 let worktree_id = file.worktree_id(cx);
4299 self.language_servers.get(&(worktree_id, language.name()))
4300 } else {
4301 None
4302 }
4303 }
4304}
4305
4306impl WorktreeHandle {
4307 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4308 match self {
4309 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4310 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4311 }
4312 }
4313}
4314
4315impl OpenBuffer {
4316 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4317 match self {
4318 OpenBuffer::Strong(handle) => Some(handle.clone()),
4319 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4320 OpenBuffer::Loading(_) => None,
4321 }
4322 }
4323}
4324
4325struct CandidateSet {
4326 snapshot: Snapshot,
4327 include_ignored: bool,
4328 include_root_name: bool,
4329}
4330
4331impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4332 type Candidates = CandidateSetIter<'a>;
4333
4334 fn id(&self) -> usize {
4335 self.snapshot.id().to_usize()
4336 }
4337
4338 fn len(&self) -> usize {
4339 if self.include_ignored {
4340 self.snapshot.file_count()
4341 } else {
4342 self.snapshot.visible_file_count()
4343 }
4344 }
4345
4346 fn prefix(&self) -> Arc<str> {
4347 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4348 self.snapshot.root_name().into()
4349 } else if self.include_root_name {
4350 format!("{}/", self.snapshot.root_name()).into()
4351 } else {
4352 "".into()
4353 }
4354 }
4355
4356 fn candidates(&'a self, start: usize) -> Self::Candidates {
4357 CandidateSetIter {
4358 traversal: self.snapshot.files(self.include_ignored, start),
4359 }
4360 }
4361}
4362
4363struct CandidateSetIter<'a> {
4364 traversal: Traversal<'a>,
4365}
4366
4367impl<'a> Iterator for CandidateSetIter<'a> {
4368 type Item = PathMatchCandidate<'a>;
4369
4370 fn next(&mut self) -> Option<Self::Item> {
4371 self.traversal.next().map(|entry| {
4372 if let EntryKind::File(char_bag) = entry.kind {
4373 PathMatchCandidate {
4374 path: &entry.path,
4375 char_bag,
4376 }
4377 } else {
4378 unreachable!()
4379 }
4380 })
4381 }
4382}
4383
4384impl Entity for Project {
4385 type Event = Event;
4386
4387 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4388 match &self.client_state {
4389 ProjectClientState::Local { remote_id_rx, .. } => {
4390 if let Some(project_id) = *remote_id_rx.borrow() {
4391 self.client
4392 .send(proto::UnregisterProject { project_id })
4393 .log_err();
4394 }
4395 }
4396 ProjectClientState::Remote { remote_id, .. } => {
4397 self.client
4398 .send(proto::LeaveProject {
4399 project_id: *remote_id,
4400 })
4401 .log_err();
4402 }
4403 }
4404 }
4405
4406 fn app_will_quit(
4407 &mut self,
4408 _: &mut MutableAppContext,
4409 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4410 let shutdown_futures = self
4411 .language_servers
4412 .drain()
4413 .filter_map(|(_, server)| server.shutdown())
4414 .collect::<Vec<_>>();
4415 Some(
4416 async move {
4417 futures::future::join_all(shutdown_futures).await;
4418 }
4419 .boxed(),
4420 )
4421 }
4422}
4423
4424impl Collaborator {
4425 fn from_proto(
4426 message: proto::Collaborator,
4427 user_store: &ModelHandle<UserStore>,
4428 cx: &mut AsyncAppContext,
4429 ) -> impl Future<Output = Result<Self>> {
4430 let user = user_store.update(cx, |user_store, cx| {
4431 user_store.fetch_user(message.user_id, cx)
4432 });
4433
4434 async move {
4435 Ok(Self {
4436 peer_id: PeerId(message.peer_id),
4437 user: user.await?,
4438 replica_id: message.replica_id as ReplicaId,
4439 })
4440 }
4441 }
4442}
4443
4444impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4445 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4446 Self {
4447 worktree_id,
4448 path: path.as_ref().into(),
4449 }
4450 }
4451}
4452
4453impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4454 fn from(options: lsp::CreateFileOptions) -> Self {
4455 Self {
4456 overwrite: options.overwrite.unwrap_or(false),
4457 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4458 }
4459 }
4460}
4461
4462impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4463 fn from(options: lsp::RenameFileOptions) -> Self {
4464 Self {
4465 overwrite: options.overwrite.unwrap_or(false),
4466 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4467 }
4468 }
4469}
4470
4471impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4472 fn from(options: lsp::DeleteFileOptions) -> Self {
4473 Self {
4474 recursive: options.recursive.unwrap_or(false),
4475 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4476 }
4477 }
4478}
4479
4480fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4481 proto::Symbol {
4482 source_worktree_id: symbol.source_worktree_id.to_proto(),
4483 worktree_id: symbol.worktree_id.to_proto(),
4484 language_name: symbol.language_name.clone(),
4485 name: symbol.name.clone(),
4486 kind: unsafe { mem::transmute(symbol.kind) },
4487 path: symbol.path.to_string_lossy().to_string(),
4488 start: Some(proto::Point {
4489 row: symbol.range.start.row,
4490 column: symbol.range.start.column,
4491 }),
4492 end: Some(proto::Point {
4493 row: symbol.range.end.row,
4494 column: symbol.range.end.column,
4495 }),
4496 signature: symbol.signature.to_vec(),
4497 }
4498}
4499
4500fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4501 let mut path_components = path.components();
4502 let mut base_components = base.components();
4503 let mut components: Vec<Component> = Vec::new();
4504 loop {
4505 match (path_components.next(), base_components.next()) {
4506 (None, None) => break,
4507 (Some(a), None) => {
4508 components.push(a);
4509 components.extend(path_components.by_ref());
4510 break;
4511 }
4512 (None, _) => components.push(Component::ParentDir),
4513 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4514 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4515 (Some(a), Some(_)) => {
4516 components.push(Component::ParentDir);
4517 for _ in base_components {
4518 components.push(Component::ParentDir);
4519 }
4520 components.push(a);
4521 components.extend(path_components.by_ref());
4522 break;
4523 }
4524 }
4525 }
4526 components.iter().map(|c| c.as_os_str()).collect()
4527}
4528
4529impl Item for Buffer {
4530 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4531 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4532 }
4533}
4534
4535#[cfg(test)]
4536mod tests {
4537 use super::{Event, *};
4538 use fs::RealFs;
4539 use futures::StreamExt;
4540 use gpui::test::subscribe;
4541 use language::{
4542 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4543 ToPoint,
4544 };
4545 use lsp::Url;
4546 use serde_json::json;
4547 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4548 use unindent::Unindent as _;
4549 use util::test::temp_tree;
4550 use worktree::WorktreeHandle as _;
4551
4552 #[gpui::test]
4553 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4554 let dir = temp_tree(json!({
4555 "root": {
4556 "apple": "",
4557 "banana": {
4558 "carrot": {
4559 "date": "",
4560 "endive": "",
4561 }
4562 },
4563 "fennel": {
4564 "grape": "",
4565 }
4566 }
4567 }));
4568
4569 let root_link_path = dir.path().join("root_link");
4570 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4571 unix::fs::symlink(
4572 &dir.path().join("root/fennel"),
4573 &dir.path().join("root/finnochio"),
4574 )
4575 .unwrap();
4576
4577 let project = Project::test(Arc::new(RealFs), cx);
4578
4579 let (tree, _) = project
4580 .update(cx, |project, cx| {
4581 project.find_or_create_local_worktree(&root_link_path, true, cx)
4582 })
4583 .await
4584 .unwrap();
4585
4586 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4587 .await;
4588 cx.read(|cx| {
4589 let tree = tree.read(cx);
4590 assert_eq!(tree.file_count(), 5);
4591 assert_eq!(
4592 tree.inode_for_path("fennel/grape"),
4593 tree.inode_for_path("finnochio/grape")
4594 );
4595 });
4596
4597 let cancel_flag = Default::default();
4598 let results = project
4599 .read_with(cx, |project, cx| {
4600 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4601 })
4602 .await;
4603 assert_eq!(
4604 results
4605 .into_iter()
4606 .map(|result| result.path)
4607 .collect::<Vec<Arc<Path>>>(),
4608 vec![
4609 PathBuf::from("banana/carrot/date").into(),
4610 PathBuf::from("banana/carrot/endive").into(),
4611 ]
4612 );
4613 }
4614
4615 #[gpui::test]
4616 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4617 cx.foreground().forbid_parking();
4618
4619 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4620 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4621 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4622 completion_provider: Some(lsp::CompletionOptions {
4623 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4624 ..Default::default()
4625 }),
4626 ..Default::default()
4627 });
4628 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4629 completion_provider: Some(lsp::CompletionOptions {
4630 trigger_characters: Some(vec![":".to_string()]),
4631 ..Default::default()
4632 }),
4633 ..Default::default()
4634 });
4635
4636 let rust_language = Arc::new(Language::new(
4637 LanguageConfig {
4638 name: "Rust".into(),
4639 path_suffixes: vec!["rs".to_string()],
4640 language_server: Some(rust_lsp_config),
4641 ..Default::default()
4642 },
4643 Some(tree_sitter_rust::language()),
4644 ));
4645 let json_language = Arc::new(Language::new(
4646 LanguageConfig {
4647 name: "JSON".into(),
4648 path_suffixes: vec!["json".to_string()],
4649 language_server: Some(json_lsp_config),
4650 ..Default::default()
4651 },
4652 None,
4653 ));
4654
4655 let fs = FakeFs::new(cx.background());
4656 fs.insert_tree(
4657 "/the-root",
4658 json!({
4659 "test.rs": "const A: i32 = 1;",
4660 "test2.rs": "",
4661 "Cargo.toml": "a = 1",
4662 "package.json": "{\"a\": 1}",
4663 }),
4664 )
4665 .await;
4666
4667 let project = Project::test(fs, cx);
4668 project.update(cx, |project, _| {
4669 project.languages.add(rust_language);
4670 project.languages.add(json_language);
4671 });
4672
4673 let worktree_id = project
4674 .update(cx, |project, cx| {
4675 project.find_or_create_local_worktree("/the-root", true, cx)
4676 })
4677 .await
4678 .unwrap()
4679 .0
4680 .read_with(cx, |tree, _| tree.id());
4681
4682 // Open a buffer without an associated language server.
4683 let toml_buffer = project
4684 .update(cx, |project, cx| {
4685 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4686 })
4687 .await
4688 .unwrap();
4689
4690 // Open a buffer with an associated language server.
4691 let rust_buffer = project
4692 .update(cx, |project, cx| {
4693 project.open_buffer((worktree_id, "test.rs"), cx)
4694 })
4695 .await
4696 .unwrap();
4697
4698 // A server is started up, and it is notified about Rust files.
4699 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4700 assert_eq!(
4701 fake_rust_server
4702 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4703 .await
4704 .text_document,
4705 lsp::TextDocumentItem {
4706 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4707 version: 0,
4708 text: "const A: i32 = 1;".to_string(),
4709 language_id: Default::default()
4710 }
4711 );
4712
4713 // The buffer is configured based on the language server's capabilities.
4714 rust_buffer.read_with(cx, |buffer, _| {
4715 assert_eq!(
4716 buffer.completion_triggers(),
4717 &[".".to_string(), "::".to_string()]
4718 );
4719 });
4720 toml_buffer.read_with(cx, |buffer, _| {
4721 assert!(buffer.completion_triggers().is_empty());
4722 });
4723
4724 // Edit a buffer. The changes are reported to the language server.
4725 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4726 assert_eq!(
4727 fake_rust_server
4728 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4729 .await
4730 .text_document,
4731 lsp::VersionedTextDocumentIdentifier::new(
4732 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4733 1
4734 )
4735 );
4736
4737 // Open a third buffer with a different associated language server.
4738 let json_buffer = project
4739 .update(cx, |project, cx| {
4740 project.open_buffer((worktree_id, "package.json"), cx)
4741 })
4742 .await
4743 .unwrap();
4744
4745 // Another language server is started up, and it is notified about
4746 // all three open buffers.
4747 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4748 assert_eq!(
4749 fake_json_server
4750 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4751 .await
4752 .text_document,
4753 lsp::TextDocumentItem {
4754 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4755 version: 0,
4756 text: "{\"a\": 1}".to_string(),
4757 language_id: Default::default()
4758 }
4759 );
4760
4761 // This buffer is configured based on the second language server's
4762 // capabilities.
4763 json_buffer.read_with(cx, |buffer, _| {
4764 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4765 });
4766
4767 // When opening another buffer whose language server is already running,
4768 // it is also configured based on the existing language server's capabilities.
4769 let rust_buffer2 = project
4770 .update(cx, |project, cx| {
4771 project.open_buffer((worktree_id, "test2.rs"), cx)
4772 })
4773 .await
4774 .unwrap();
4775 rust_buffer2.read_with(cx, |buffer, _| {
4776 assert_eq!(
4777 buffer.completion_triggers(),
4778 &[".".to_string(), "::".to_string()]
4779 );
4780 });
4781
4782 // Changes are reported only to servers matching the buffer's language.
4783 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4784 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4785 assert_eq!(
4786 fake_rust_server
4787 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4788 .await
4789 .text_document,
4790 lsp::VersionedTextDocumentIdentifier::new(
4791 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4792 1
4793 )
4794 );
4795
4796 // Save notifications are reported to all servers.
4797 toml_buffer
4798 .update(cx, |buffer, cx| buffer.save(cx))
4799 .await
4800 .unwrap();
4801 assert_eq!(
4802 fake_rust_server
4803 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4804 .await
4805 .text_document,
4806 lsp::TextDocumentIdentifier::new(
4807 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4808 )
4809 );
4810 assert_eq!(
4811 fake_json_server
4812 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4813 .await
4814 .text_document,
4815 lsp::TextDocumentIdentifier::new(
4816 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4817 )
4818 );
4819
4820 // Close notifications are reported only to servers matching the buffer's language.
4821 cx.update(|_| drop(json_buffer));
4822 let close_message = lsp::DidCloseTextDocumentParams {
4823 text_document: lsp::TextDocumentIdentifier::new(
4824 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4825 ),
4826 };
4827 assert_eq!(
4828 fake_json_server
4829 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4830 .await,
4831 close_message,
4832 );
4833 }
4834
4835 #[gpui::test]
4836 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4837 cx.foreground().forbid_parking();
4838
4839 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4840 let progress_token = language_server_config
4841 .disk_based_diagnostics_progress_token
4842 .clone()
4843 .unwrap();
4844
4845 let language = Arc::new(Language::new(
4846 LanguageConfig {
4847 name: "Rust".into(),
4848 path_suffixes: vec!["rs".to_string()],
4849 language_server: Some(language_server_config),
4850 ..Default::default()
4851 },
4852 Some(tree_sitter_rust::language()),
4853 ));
4854
4855 let fs = FakeFs::new(cx.background());
4856 fs.insert_tree(
4857 "/dir",
4858 json!({
4859 "a.rs": "fn a() { A }",
4860 "b.rs": "const y: i32 = 1",
4861 }),
4862 )
4863 .await;
4864
4865 let project = Project::test(fs, cx);
4866 project.update(cx, |project, _| project.languages.add(language));
4867
4868 let (tree, _) = project
4869 .update(cx, |project, cx| {
4870 project.find_or_create_local_worktree("/dir", true, cx)
4871 })
4872 .await
4873 .unwrap();
4874 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4875
4876 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4877 .await;
4878
4879 // Cause worktree to start the fake language server
4880 let _buffer = project
4881 .update(cx, |project, cx| {
4882 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4883 })
4884 .await
4885 .unwrap();
4886
4887 let mut events = subscribe(&project, cx);
4888
4889 let mut fake_server = fake_servers.next().await.unwrap();
4890 fake_server.start_progress(&progress_token).await;
4891 assert_eq!(
4892 events.next().await.unwrap(),
4893 Event::DiskBasedDiagnosticsStarted
4894 );
4895
4896 fake_server.start_progress(&progress_token).await;
4897 fake_server.end_progress(&progress_token).await;
4898 fake_server.start_progress(&progress_token).await;
4899
4900 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4901 lsp::PublishDiagnosticsParams {
4902 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4903 version: None,
4904 diagnostics: vec![lsp::Diagnostic {
4905 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4906 severity: Some(lsp::DiagnosticSeverity::ERROR),
4907 message: "undefined variable 'A'".to_string(),
4908 ..Default::default()
4909 }],
4910 },
4911 );
4912 assert_eq!(
4913 events.next().await.unwrap(),
4914 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4915 );
4916
4917 fake_server.end_progress(&progress_token).await;
4918 fake_server.end_progress(&progress_token).await;
4919 assert_eq!(
4920 events.next().await.unwrap(),
4921 Event::DiskBasedDiagnosticsUpdated
4922 );
4923 assert_eq!(
4924 events.next().await.unwrap(),
4925 Event::DiskBasedDiagnosticsFinished
4926 );
4927
4928 let buffer = project
4929 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4930 .await
4931 .unwrap();
4932
4933 buffer.read_with(cx, |buffer, _| {
4934 let snapshot = buffer.snapshot();
4935 let diagnostics = snapshot
4936 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4937 .collect::<Vec<_>>();
4938 assert_eq!(
4939 diagnostics,
4940 &[DiagnosticEntry {
4941 range: Point::new(0, 9)..Point::new(0, 10),
4942 diagnostic: Diagnostic {
4943 severity: lsp::DiagnosticSeverity::ERROR,
4944 message: "undefined variable 'A'".to_string(),
4945 group_id: 0,
4946 is_primary: true,
4947 ..Default::default()
4948 }
4949 }]
4950 )
4951 });
4952 }
4953
4954 #[gpui::test]
4955 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4956 cx.foreground().forbid_parking();
4957
4958 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4959 lsp_config
4960 .disk_based_diagnostic_sources
4961 .insert("disk".to_string());
4962 let language = Arc::new(Language::new(
4963 LanguageConfig {
4964 name: "Rust".into(),
4965 path_suffixes: vec!["rs".to_string()],
4966 language_server: Some(lsp_config),
4967 ..Default::default()
4968 },
4969 Some(tree_sitter_rust::language()),
4970 ));
4971
4972 let text = "
4973 fn a() { A }
4974 fn b() { BB }
4975 fn c() { CCC }
4976 "
4977 .unindent();
4978
4979 let fs = FakeFs::new(cx.background());
4980 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4981
4982 let project = Project::test(fs, cx);
4983 project.update(cx, |project, _| project.languages.add(language));
4984
4985 let worktree_id = project
4986 .update(cx, |project, cx| {
4987 project.find_or_create_local_worktree("/dir", true, cx)
4988 })
4989 .await
4990 .unwrap()
4991 .0
4992 .read_with(cx, |tree, _| tree.id());
4993
4994 let buffer = project
4995 .update(cx, |project, cx| {
4996 project.open_buffer((worktree_id, "a.rs"), cx)
4997 })
4998 .await
4999 .unwrap();
5000
5001 let mut fake_server = fake_servers.next().await.unwrap();
5002 let open_notification = fake_server
5003 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5004 .await;
5005
5006 // Edit the buffer, moving the content down
5007 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5008 let change_notification_1 = fake_server
5009 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5010 .await;
5011 assert!(
5012 change_notification_1.text_document.version > open_notification.text_document.version
5013 );
5014
5015 // Report some diagnostics for the initial version of the buffer
5016 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5017 lsp::PublishDiagnosticsParams {
5018 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5019 version: Some(open_notification.text_document.version),
5020 diagnostics: vec![
5021 lsp::Diagnostic {
5022 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5023 severity: Some(DiagnosticSeverity::ERROR),
5024 message: "undefined variable 'A'".to_string(),
5025 source: Some("disk".to_string()),
5026 ..Default::default()
5027 },
5028 lsp::Diagnostic {
5029 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5030 severity: Some(DiagnosticSeverity::ERROR),
5031 message: "undefined variable 'BB'".to_string(),
5032 source: Some("disk".to_string()),
5033 ..Default::default()
5034 },
5035 lsp::Diagnostic {
5036 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5037 severity: Some(DiagnosticSeverity::ERROR),
5038 source: Some("disk".to_string()),
5039 message: "undefined variable 'CCC'".to_string(),
5040 ..Default::default()
5041 },
5042 ],
5043 },
5044 );
5045
5046 // The diagnostics have moved down since they were created.
5047 buffer.next_notification(cx).await;
5048 buffer.read_with(cx, |buffer, _| {
5049 assert_eq!(
5050 buffer
5051 .snapshot()
5052 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5053 .collect::<Vec<_>>(),
5054 &[
5055 DiagnosticEntry {
5056 range: Point::new(3, 9)..Point::new(3, 11),
5057 diagnostic: Diagnostic {
5058 severity: DiagnosticSeverity::ERROR,
5059 message: "undefined variable 'BB'".to_string(),
5060 is_disk_based: true,
5061 group_id: 1,
5062 is_primary: true,
5063 ..Default::default()
5064 },
5065 },
5066 DiagnosticEntry {
5067 range: Point::new(4, 9)..Point::new(4, 12),
5068 diagnostic: Diagnostic {
5069 severity: DiagnosticSeverity::ERROR,
5070 message: "undefined variable 'CCC'".to_string(),
5071 is_disk_based: true,
5072 group_id: 2,
5073 is_primary: true,
5074 ..Default::default()
5075 }
5076 }
5077 ]
5078 );
5079 assert_eq!(
5080 chunks_with_diagnostics(buffer, 0..buffer.len()),
5081 [
5082 ("\n\nfn a() { ".to_string(), None),
5083 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5084 (" }\nfn b() { ".to_string(), None),
5085 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5086 (" }\nfn c() { ".to_string(), None),
5087 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5088 (" }\n".to_string(), None),
5089 ]
5090 );
5091 assert_eq!(
5092 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5093 [
5094 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5095 (" }\nfn c() { ".to_string(), None),
5096 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5097 ]
5098 );
5099 });
5100
5101 // Ensure overlapping diagnostics are highlighted correctly.
5102 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5103 lsp::PublishDiagnosticsParams {
5104 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5105 version: Some(open_notification.text_document.version),
5106 diagnostics: vec![
5107 lsp::Diagnostic {
5108 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5109 severity: Some(DiagnosticSeverity::ERROR),
5110 message: "undefined variable 'A'".to_string(),
5111 source: Some("disk".to_string()),
5112 ..Default::default()
5113 },
5114 lsp::Diagnostic {
5115 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5116 severity: Some(DiagnosticSeverity::WARNING),
5117 message: "unreachable statement".to_string(),
5118 source: Some("disk".to_string()),
5119 ..Default::default()
5120 },
5121 ],
5122 },
5123 );
5124
5125 buffer.next_notification(cx).await;
5126 buffer.read_with(cx, |buffer, _| {
5127 assert_eq!(
5128 buffer
5129 .snapshot()
5130 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5131 .collect::<Vec<_>>(),
5132 &[
5133 DiagnosticEntry {
5134 range: Point::new(2, 9)..Point::new(2, 12),
5135 diagnostic: Diagnostic {
5136 severity: DiagnosticSeverity::WARNING,
5137 message: "unreachable statement".to_string(),
5138 is_disk_based: true,
5139 group_id: 1,
5140 is_primary: true,
5141 ..Default::default()
5142 }
5143 },
5144 DiagnosticEntry {
5145 range: Point::new(2, 9)..Point::new(2, 10),
5146 diagnostic: Diagnostic {
5147 severity: DiagnosticSeverity::ERROR,
5148 message: "undefined variable 'A'".to_string(),
5149 is_disk_based: true,
5150 group_id: 0,
5151 is_primary: true,
5152 ..Default::default()
5153 },
5154 }
5155 ]
5156 );
5157 assert_eq!(
5158 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5159 [
5160 ("fn a() { ".to_string(), None),
5161 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5162 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5163 ("\n".to_string(), None),
5164 ]
5165 );
5166 assert_eq!(
5167 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5168 [
5169 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5170 ("\n".to_string(), None),
5171 ]
5172 );
5173 });
5174
5175 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5176 // changes since the last save.
5177 buffer.update(cx, |buffer, cx| {
5178 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5179 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5180 });
5181 let change_notification_2 =
5182 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5183 assert!(
5184 change_notification_2.await.text_document.version
5185 > change_notification_1.text_document.version
5186 );
5187
5188 // Handle out-of-order diagnostics
5189 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5190 lsp::PublishDiagnosticsParams {
5191 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5192 version: Some(open_notification.text_document.version),
5193 diagnostics: vec![
5194 lsp::Diagnostic {
5195 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5196 severity: Some(DiagnosticSeverity::ERROR),
5197 message: "undefined variable 'BB'".to_string(),
5198 source: Some("disk".to_string()),
5199 ..Default::default()
5200 },
5201 lsp::Diagnostic {
5202 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5203 severity: Some(DiagnosticSeverity::WARNING),
5204 message: "undefined variable 'A'".to_string(),
5205 source: Some("disk".to_string()),
5206 ..Default::default()
5207 },
5208 ],
5209 },
5210 );
5211
5212 buffer.next_notification(cx).await;
5213 buffer.read_with(cx, |buffer, _| {
5214 assert_eq!(
5215 buffer
5216 .snapshot()
5217 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5218 .collect::<Vec<_>>(),
5219 &[
5220 DiagnosticEntry {
5221 range: Point::new(2, 21)..Point::new(2, 22),
5222 diagnostic: Diagnostic {
5223 severity: DiagnosticSeverity::WARNING,
5224 message: "undefined variable 'A'".to_string(),
5225 is_disk_based: true,
5226 group_id: 1,
5227 is_primary: true,
5228 ..Default::default()
5229 }
5230 },
5231 DiagnosticEntry {
5232 range: Point::new(3, 9)..Point::new(3, 11),
5233 diagnostic: Diagnostic {
5234 severity: DiagnosticSeverity::ERROR,
5235 message: "undefined variable 'BB'".to_string(),
5236 is_disk_based: true,
5237 group_id: 0,
5238 is_primary: true,
5239 ..Default::default()
5240 },
5241 }
5242 ]
5243 );
5244 });
5245 }
5246
5247 #[gpui::test]
5248 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5249 cx.foreground().forbid_parking();
5250
5251 let text = concat!(
5252 "let one = ;\n", //
5253 "let two = \n",
5254 "let three = 3;\n",
5255 );
5256
5257 let fs = FakeFs::new(cx.background());
5258 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5259
5260 let project = Project::test(fs, cx);
5261 let worktree_id = project
5262 .update(cx, |project, cx| {
5263 project.find_or_create_local_worktree("/dir", true, cx)
5264 })
5265 .await
5266 .unwrap()
5267 .0
5268 .read_with(cx, |tree, _| tree.id());
5269
5270 let buffer = project
5271 .update(cx, |project, cx| {
5272 project.open_buffer((worktree_id, "a.rs"), cx)
5273 })
5274 .await
5275 .unwrap();
5276
5277 project.update(cx, |project, cx| {
5278 project
5279 .update_buffer_diagnostics(
5280 &buffer,
5281 vec![
5282 DiagnosticEntry {
5283 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5284 diagnostic: Diagnostic {
5285 severity: DiagnosticSeverity::ERROR,
5286 message: "syntax error 1".to_string(),
5287 ..Default::default()
5288 },
5289 },
5290 DiagnosticEntry {
5291 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5292 diagnostic: Diagnostic {
5293 severity: DiagnosticSeverity::ERROR,
5294 message: "syntax error 2".to_string(),
5295 ..Default::default()
5296 },
5297 },
5298 ],
5299 None,
5300 cx,
5301 )
5302 .unwrap();
5303 });
5304
5305 // An empty range is extended forward to include the following character.
5306 // At the end of a line, an empty range is extended backward to include
5307 // the preceding character.
5308 buffer.read_with(cx, |buffer, _| {
5309 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5310 assert_eq!(
5311 chunks
5312 .iter()
5313 .map(|(s, d)| (s.as_str(), *d))
5314 .collect::<Vec<_>>(),
5315 &[
5316 ("let one = ", None),
5317 (";", Some(DiagnosticSeverity::ERROR)),
5318 ("\nlet two =", None),
5319 (" ", Some(DiagnosticSeverity::ERROR)),
5320 ("\nlet three = 3;\n", None)
5321 ]
5322 );
5323 });
5324 }
5325
5326 #[gpui::test]
5327 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5328 cx.foreground().forbid_parking();
5329
5330 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5331 let language = Arc::new(Language::new(
5332 LanguageConfig {
5333 name: "Rust".into(),
5334 path_suffixes: vec!["rs".to_string()],
5335 language_server: Some(lsp_config),
5336 ..Default::default()
5337 },
5338 Some(tree_sitter_rust::language()),
5339 ));
5340
5341 let text = "
5342 fn a() {
5343 f1();
5344 }
5345 fn b() {
5346 f2();
5347 }
5348 fn c() {
5349 f3();
5350 }
5351 "
5352 .unindent();
5353
5354 let fs = FakeFs::new(cx.background());
5355 fs.insert_tree(
5356 "/dir",
5357 json!({
5358 "a.rs": text.clone(),
5359 }),
5360 )
5361 .await;
5362
5363 let project = Project::test(fs, cx);
5364 project.update(cx, |project, _| project.languages.add(language));
5365
5366 let worktree_id = project
5367 .update(cx, |project, cx| {
5368 project.find_or_create_local_worktree("/dir", true, cx)
5369 })
5370 .await
5371 .unwrap()
5372 .0
5373 .read_with(cx, |tree, _| tree.id());
5374
5375 let buffer = project
5376 .update(cx, |project, cx| {
5377 project.open_buffer((worktree_id, "a.rs"), cx)
5378 })
5379 .await
5380 .unwrap();
5381
5382 let mut fake_server = fake_servers.next().await.unwrap();
5383 let lsp_document_version = fake_server
5384 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5385 .await
5386 .text_document
5387 .version;
5388
5389 // Simulate editing the buffer after the language server computes some edits.
5390 buffer.update(cx, |buffer, cx| {
5391 buffer.edit(
5392 [Point::new(0, 0)..Point::new(0, 0)],
5393 "// above first function\n",
5394 cx,
5395 );
5396 buffer.edit(
5397 [Point::new(2, 0)..Point::new(2, 0)],
5398 " // inside first function\n",
5399 cx,
5400 );
5401 buffer.edit(
5402 [Point::new(6, 4)..Point::new(6, 4)],
5403 "// inside second function ",
5404 cx,
5405 );
5406
5407 assert_eq!(
5408 buffer.text(),
5409 "
5410 // above first function
5411 fn a() {
5412 // inside first function
5413 f1();
5414 }
5415 fn b() {
5416 // inside second function f2();
5417 }
5418 fn c() {
5419 f3();
5420 }
5421 "
5422 .unindent()
5423 );
5424 });
5425
5426 let edits = project
5427 .update(cx, |project, cx| {
5428 project.edits_from_lsp(
5429 &buffer,
5430 vec![
5431 // replace body of first function
5432 lsp::TextEdit {
5433 range: lsp::Range::new(
5434 lsp::Position::new(0, 0),
5435 lsp::Position::new(3, 0),
5436 ),
5437 new_text: "
5438 fn a() {
5439 f10();
5440 }
5441 "
5442 .unindent(),
5443 },
5444 // edit inside second function
5445 lsp::TextEdit {
5446 range: lsp::Range::new(
5447 lsp::Position::new(4, 6),
5448 lsp::Position::new(4, 6),
5449 ),
5450 new_text: "00".into(),
5451 },
5452 // edit inside third function via two distinct edits
5453 lsp::TextEdit {
5454 range: lsp::Range::new(
5455 lsp::Position::new(7, 5),
5456 lsp::Position::new(7, 5),
5457 ),
5458 new_text: "4000".into(),
5459 },
5460 lsp::TextEdit {
5461 range: lsp::Range::new(
5462 lsp::Position::new(7, 5),
5463 lsp::Position::new(7, 6),
5464 ),
5465 new_text: "".into(),
5466 },
5467 ],
5468 Some(lsp_document_version),
5469 cx,
5470 )
5471 })
5472 .await
5473 .unwrap();
5474
5475 buffer.update(cx, |buffer, cx| {
5476 for (range, new_text) in edits {
5477 buffer.edit([range], new_text, cx);
5478 }
5479 assert_eq!(
5480 buffer.text(),
5481 "
5482 // above first function
5483 fn a() {
5484 // inside first function
5485 f10();
5486 }
5487 fn b() {
5488 // inside second function f200();
5489 }
5490 fn c() {
5491 f4000();
5492 }
5493 "
5494 .unindent()
5495 );
5496 });
5497 }
5498
5499 #[gpui::test]
5500 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5501 cx.foreground().forbid_parking();
5502
5503 let text = "
5504 use a::b;
5505 use a::c;
5506
5507 fn f() {
5508 b();
5509 c();
5510 }
5511 "
5512 .unindent();
5513
5514 let fs = FakeFs::new(cx.background());
5515 fs.insert_tree(
5516 "/dir",
5517 json!({
5518 "a.rs": text.clone(),
5519 }),
5520 )
5521 .await;
5522
5523 let project = Project::test(fs, cx);
5524 let worktree_id = project
5525 .update(cx, |project, cx| {
5526 project.find_or_create_local_worktree("/dir", true, cx)
5527 })
5528 .await
5529 .unwrap()
5530 .0
5531 .read_with(cx, |tree, _| tree.id());
5532
5533 let buffer = project
5534 .update(cx, |project, cx| {
5535 project.open_buffer((worktree_id, "a.rs"), cx)
5536 })
5537 .await
5538 .unwrap();
5539
5540 // Simulate the language server sending us a small edit in the form of a very large diff.
5541 // Rust-analyzer does this when performing a merge-imports code action.
5542 let edits = project
5543 .update(cx, |project, cx| {
5544 project.edits_from_lsp(
5545 &buffer,
5546 [
5547 // Replace the first use statement without editing the semicolon.
5548 lsp::TextEdit {
5549 range: lsp::Range::new(
5550 lsp::Position::new(0, 4),
5551 lsp::Position::new(0, 8),
5552 ),
5553 new_text: "a::{b, c}".into(),
5554 },
5555 // Reinsert the remainder of the file between the semicolon and the final
5556 // newline of the file.
5557 lsp::TextEdit {
5558 range: lsp::Range::new(
5559 lsp::Position::new(0, 9),
5560 lsp::Position::new(0, 9),
5561 ),
5562 new_text: "\n\n".into(),
5563 },
5564 lsp::TextEdit {
5565 range: lsp::Range::new(
5566 lsp::Position::new(0, 9),
5567 lsp::Position::new(0, 9),
5568 ),
5569 new_text: "
5570 fn f() {
5571 b();
5572 c();
5573 }"
5574 .unindent(),
5575 },
5576 // Delete everything after the first newline of the file.
5577 lsp::TextEdit {
5578 range: lsp::Range::new(
5579 lsp::Position::new(1, 0),
5580 lsp::Position::new(7, 0),
5581 ),
5582 new_text: "".into(),
5583 },
5584 ],
5585 None,
5586 cx,
5587 )
5588 })
5589 .await
5590 .unwrap();
5591
5592 buffer.update(cx, |buffer, cx| {
5593 let edits = edits
5594 .into_iter()
5595 .map(|(range, text)| {
5596 (
5597 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5598 text,
5599 )
5600 })
5601 .collect::<Vec<_>>();
5602
5603 assert_eq!(
5604 edits,
5605 [
5606 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5607 (Point::new(1, 0)..Point::new(2, 0), "".into())
5608 ]
5609 );
5610
5611 for (range, new_text) in edits {
5612 buffer.edit([range], new_text, cx);
5613 }
5614 assert_eq!(
5615 buffer.text(),
5616 "
5617 use a::{b, c};
5618
5619 fn f() {
5620 b();
5621 c();
5622 }
5623 "
5624 .unindent()
5625 );
5626 });
5627 }
5628
5629 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5630 buffer: &Buffer,
5631 range: Range<T>,
5632 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5633 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5634 for chunk in buffer.snapshot().chunks(range, true) {
5635 if chunks.last().map_or(false, |prev_chunk| {
5636 prev_chunk.1 == chunk.diagnostic_severity
5637 }) {
5638 chunks.last_mut().unwrap().0.push_str(chunk.text);
5639 } else {
5640 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5641 }
5642 }
5643 chunks
5644 }
5645
5646 #[gpui::test]
5647 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5648 let dir = temp_tree(json!({
5649 "root": {
5650 "dir1": {},
5651 "dir2": {
5652 "dir3": {}
5653 }
5654 }
5655 }));
5656
5657 let project = Project::test(Arc::new(RealFs), cx);
5658 let (tree, _) = project
5659 .update(cx, |project, cx| {
5660 project.find_or_create_local_worktree(&dir.path(), true, cx)
5661 })
5662 .await
5663 .unwrap();
5664
5665 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5666 .await;
5667
5668 let cancel_flag = Default::default();
5669 let results = project
5670 .read_with(cx, |project, cx| {
5671 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5672 })
5673 .await;
5674
5675 assert!(results.is_empty());
5676 }
5677
5678 #[gpui::test]
5679 async fn test_definition(cx: &mut gpui::TestAppContext) {
5680 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5681 let language = Arc::new(Language::new(
5682 LanguageConfig {
5683 name: "Rust".into(),
5684 path_suffixes: vec!["rs".to_string()],
5685 language_server: Some(language_server_config),
5686 ..Default::default()
5687 },
5688 Some(tree_sitter_rust::language()),
5689 ));
5690
5691 let fs = FakeFs::new(cx.background());
5692 fs.insert_tree(
5693 "/dir",
5694 json!({
5695 "a.rs": "const fn a() { A }",
5696 "b.rs": "const y: i32 = crate::a()",
5697 }),
5698 )
5699 .await;
5700
5701 let project = Project::test(fs, cx);
5702 project.update(cx, |project, _| {
5703 Arc::get_mut(&mut project.languages).unwrap().add(language);
5704 });
5705
5706 let (tree, _) = project
5707 .update(cx, |project, cx| {
5708 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5709 })
5710 .await
5711 .unwrap();
5712 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5713 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5714 .await;
5715
5716 let buffer = project
5717 .update(cx, |project, cx| {
5718 project.open_buffer(
5719 ProjectPath {
5720 worktree_id,
5721 path: Path::new("").into(),
5722 },
5723 cx,
5724 )
5725 })
5726 .await
5727 .unwrap();
5728
5729 let mut fake_server = fake_servers.next().await.unwrap();
5730 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5731 let params = params.text_document_position_params;
5732 assert_eq!(
5733 params.text_document.uri.to_file_path().unwrap(),
5734 Path::new("/dir/b.rs"),
5735 );
5736 assert_eq!(params.position, lsp::Position::new(0, 22));
5737
5738 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5739 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5740 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5741 )))
5742 });
5743
5744 let mut definitions = project
5745 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5746 .await
5747 .unwrap();
5748
5749 assert_eq!(definitions.len(), 1);
5750 let definition = definitions.pop().unwrap();
5751 cx.update(|cx| {
5752 let target_buffer = definition.buffer.read(cx);
5753 assert_eq!(
5754 target_buffer
5755 .file()
5756 .unwrap()
5757 .as_local()
5758 .unwrap()
5759 .abs_path(cx),
5760 Path::new("/dir/a.rs"),
5761 );
5762 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5763 assert_eq!(
5764 list_worktrees(&project, cx),
5765 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5766 );
5767
5768 drop(definition);
5769 });
5770 cx.read(|cx| {
5771 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5772 });
5773
5774 fn list_worktrees<'a>(
5775 project: &'a ModelHandle<Project>,
5776 cx: &'a AppContext,
5777 ) -> Vec<(&'a Path, bool)> {
5778 project
5779 .read(cx)
5780 .worktrees(cx)
5781 .map(|worktree| {
5782 let worktree = worktree.read(cx);
5783 (
5784 worktree.as_local().unwrap().abs_path().as_ref(),
5785 worktree.is_visible(),
5786 )
5787 })
5788 .collect::<Vec<_>>()
5789 }
5790 }
5791
5792 #[gpui::test]
5793 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5794 let fs = FakeFs::new(cx.background());
5795 fs.insert_tree(
5796 "/dir",
5797 json!({
5798 "file1": "the old contents",
5799 }),
5800 )
5801 .await;
5802
5803 let project = Project::test(fs.clone(), cx);
5804 let worktree_id = project
5805 .update(cx, |p, cx| {
5806 p.find_or_create_local_worktree("/dir", true, cx)
5807 })
5808 .await
5809 .unwrap()
5810 .0
5811 .read_with(cx, |tree, _| tree.id());
5812
5813 let buffer = project
5814 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5815 .await
5816 .unwrap();
5817 buffer
5818 .update(cx, |buffer, cx| {
5819 assert_eq!(buffer.text(), "the old contents");
5820 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5821 buffer.save(cx)
5822 })
5823 .await
5824 .unwrap();
5825
5826 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5827 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5828 }
5829
5830 #[gpui::test]
5831 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5832 let fs = FakeFs::new(cx.background());
5833 fs.insert_tree(
5834 "/dir",
5835 json!({
5836 "file1": "the old contents",
5837 }),
5838 )
5839 .await;
5840
5841 let project = Project::test(fs.clone(), cx);
5842 let worktree_id = project
5843 .update(cx, |p, cx| {
5844 p.find_or_create_local_worktree("/dir/file1", true, cx)
5845 })
5846 .await
5847 .unwrap()
5848 .0
5849 .read_with(cx, |tree, _| tree.id());
5850
5851 let buffer = project
5852 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5853 .await
5854 .unwrap();
5855 buffer
5856 .update(cx, |buffer, cx| {
5857 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5858 buffer.save(cx)
5859 })
5860 .await
5861 .unwrap();
5862
5863 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5864 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5865 }
5866
5867 #[gpui::test]
5868 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5869 let fs = FakeFs::new(cx.background());
5870 fs.insert_tree("/dir", json!({})).await;
5871
5872 let project = Project::test(fs.clone(), cx);
5873 let (worktree, _) = project
5874 .update(cx, |project, cx| {
5875 project.find_or_create_local_worktree("/dir", true, cx)
5876 })
5877 .await
5878 .unwrap();
5879 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5880
5881 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5882 buffer.update(cx, |buffer, cx| {
5883 buffer.edit([0..0], "abc", cx);
5884 assert!(buffer.is_dirty());
5885 assert!(!buffer.has_conflict());
5886 });
5887 project
5888 .update(cx, |project, cx| {
5889 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5890 })
5891 .await
5892 .unwrap();
5893 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5894 buffer.read_with(cx, |buffer, cx| {
5895 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5896 assert!(!buffer.is_dirty());
5897 assert!(!buffer.has_conflict());
5898 });
5899
5900 let opened_buffer = project
5901 .update(cx, |project, cx| {
5902 project.open_buffer((worktree_id, "file1"), cx)
5903 })
5904 .await
5905 .unwrap();
5906 assert_eq!(opened_buffer, buffer);
5907 }
5908
5909 #[gpui::test(retries = 5)]
5910 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5911 let dir = temp_tree(json!({
5912 "a": {
5913 "file1": "",
5914 "file2": "",
5915 "file3": "",
5916 },
5917 "b": {
5918 "c": {
5919 "file4": "",
5920 "file5": "",
5921 }
5922 }
5923 }));
5924
5925 let project = Project::test(Arc::new(RealFs), cx);
5926 let rpc = project.read_with(cx, |p, _| p.client.clone());
5927
5928 let (tree, _) = project
5929 .update(cx, |p, cx| {
5930 p.find_or_create_local_worktree(dir.path(), true, cx)
5931 })
5932 .await
5933 .unwrap();
5934 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5935
5936 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5937 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5938 async move { buffer.await.unwrap() }
5939 };
5940 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5941 tree.read_with(cx, |tree, _| {
5942 tree.entry_for_path(path)
5943 .expect(&format!("no entry for path {}", path))
5944 .id
5945 })
5946 };
5947
5948 let buffer2 = buffer_for_path("a/file2", cx).await;
5949 let buffer3 = buffer_for_path("a/file3", cx).await;
5950 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5951 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5952
5953 let file2_id = id_for_path("a/file2", &cx);
5954 let file3_id = id_for_path("a/file3", &cx);
5955 let file4_id = id_for_path("b/c/file4", &cx);
5956
5957 // Wait for the initial scan.
5958 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5959 .await;
5960
5961 // Create a remote copy of this worktree.
5962 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5963 let (remote, load_task) = cx.update(|cx| {
5964 Worktree::remote(
5965 1,
5966 1,
5967 initial_snapshot.to_proto(&Default::default(), true),
5968 rpc.clone(),
5969 cx,
5970 )
5971 });
5972 load_task.await;
5973
5974 cx.read(|cx| {
5975 assert!(!buffer2.read(cx).is_dirty());
5976 assert!(!buffer3.read(cx).is_dirty());
5977 assert!(!buffer4.read(cx).is_dirty());
5978 assert!(!buffer5.read(cx).is_dirty());
5979 });
5980
5981 // Rename and delete files and directories.
5982 tree.flush_fs_events(&cx).await;
5983 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5984 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5985 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5986 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5987 tree.flush_fs_events(&cx).await;
5988
5989 let expected_paths = vec![
5990 "a",
5991 "a/file1",
5992 "a/file2.new",
5993 "b",
5994 "d",
5995 "d/file3",
5996 "d/file4",
5997 ];
5998
5999 cx.read(|app| {
6000 assert_eq!(
6001 tree.read(app)
6002 .paths()
6003 .map(|p| p.to_str().unwrap())
6004 .collect::<Vec<_>>(),
6005 expected_paths
6006 );
6007
6008 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6009 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6010 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6011
6012 assert_eq!(
6013 buffer2.read(app).file().unwrap().path().as_ref(),
6014 Path::new("a/file2.new")
6015 );
6016 assert_eq!(
6017 buffer3.read(app).file().unwrap().path().as_ref(),
6018 Path::new("d/file3")
6019 );
6020 assert_eq!(
6021 buffer4.read(app).file().unwrap().path().as_ref(),
6022 Path::new("d/file4")
6023 );
6024 assert_eq!(
6025 buffer5.read(app).file().unwrap().path().as_ref(),
6026 Path::new("b/c/file5")
6027 );
6028
6029 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6030 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6031 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6032 assert!(buffer5.read(app).file().unwrap().is_deleted());
6033 });
6034
6035 // Update the remote worktree. Check that it becomes consistent with the
6036 // local worktree.
6037 remote.update(cx, |remote, cx| {
6038 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6039 &initial_snapshot,
6040 1,
6041 1,
6042 true,
6043 );
6044 remote
6045 .as_remote_mut()
6046 .unwrap()
6047 .snapshot
6048 .apply_remote_update(update_message)
6049 .unwrap();
6050
6051 assert_eq!(
6052 remote
6053 .paths()
6054 .map(|p| p.to_str().unwrap())
6055 .collect::<Vec<_>>(),
6056 expected_paths
6057 );
6058 });
6059 }
6060
6061 #[gpui::test]
6062 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6063 let fs = FakeFs::new(cx.background());
6064 fs.insert_tree(
6065 "/the-dir",
6066 json!({
6067 "a.txt": "a-contents",
6068 "b.txt": "b-contents",
6069 }),
6070 )
6071 .await;
6072
6073 let project = Project::test(fs.clone(), cx);
6074 let worktree_id = project
6075 .update(cx, |p, cx| {
6076 p.find_or_create_local_worktree("/the-dir", true, cx)
6077 })
6078 .await
6079 .unwrap()
6080 .0
6081 .read_with(cx, |tree, _| tree.id());
6082
6083 // Spawn multiple tasks to open paths, repeating some paths.
6084 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6085 (
6086 p.open_buffer((worktree_id, "a.txt"), cx),
6087 p.open_buffer((worktree_id, "b.txt"), cx),
6088 p.open_buffer((worktree_id, "a.txt"), cx),
6089 )
6090 });
6091
6092 let buffer_a_1 = buffer_a_1.await.unwrap();
6093 let buffer_a_2 = buffer_a_2.await.unwrap();
6094 let buffer_b = buffer_b.await.unwrap();
6095 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6096 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6097
6098 // There is only one buffer per path.
6099 let buffer_a_id = buffer_a_1.id();
6100 assert_eq!(buffer_a_2.id(), buffer_a_id);
6101
6102 // Open the same path again while it is still open.
6103 drop(buffer_a_1);
6104 let buffer_a_3 = project
6105 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6106 .await
6107 .unwrap();
6108
6109 // There's still only one buffer per path.
6110 assert_eq!(buffer_a_3.id(), buffer_a_id);
6111 }
6112
6113 #[gpui::test]
6114 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6115 use std::fs;
6116
6117 let dir = temp_tree(json!({
6118 "file1": "abc",
6119 "file2": "def",
6120 "file3": "ghi",
6121 }));
6122
6123 let project = Project::test(Arc::new(RealFs), cx);
6124 let (worktree, _) = project
6125 .update(cx, |p, cx| {
6126 p.find_or_create_local_worktree(dir.path(), true, cx)
6127 })
6128 .await
6129 .unwrap();
6130 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6131
6132 worktree.flush_fs_events(&cx).await;
6133 worktree
6134 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6135 .await;
6136
6137 let buffer1 = project
6138 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6139 .await
6140 .unwrap();
6141 let events = Rc::new(RefCell::new(Vec::new()));
6142
6143 // initially, the buffer isn't dirty.
6144 buffer1.update(cx, |buffer, cx| {
6145 cx.subscribe(&buffer1, {
6146 let events = events.clone();
6147 move |_, _, event, _| match event {
6148 BufferEvent::Operation(_) => {}
6149 _ => events.borrow_mut().push(event.clone()),
6150 }
6151 })
6152 .detach();
6153
6154 assert!(!buffer.is_dirty());
6155 assert!(events.borrow().is_empty());
6156
6157 buffer.edit(vec![1..2], "", cx);
6158 });
6159
6160 // after the first edit, the buffer is dirty, and emits a dirtied event.
6161 buffer1.update(cx, |buffer, cx| {
6162 assert!(buffer.text() == "ac");
6163 assert!(buffer.is_dirty());
6164 assert_eq!(
6165 *events.borrow(),
6166 &[language::Event::Edited, language::Event::Dirtied]
6167 );
6168 events.borrow_mut().clear();
6169 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6170 });
6171
6172 // after saving, the buffer is not dirty, and emits a saved event.
6173 buffer1.update(cx, |buffer, cx| {
6174 assert!(!buffer.is_dirty());
6175 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6176 events.borrow_mut().clear();
6177
6178 buffer.edit(vec![1..1], "B", cx);
6179 buffer.edit(vec![2..2], "D", cx);
6180 });
6181
6182 // after editing again, the buffer is dirty, and emits another dirty event.
6183 buffer1.update(cx, |buffer, cx| {
6184 assert!(buffer.text() == "aBDc");
6185 assert!(buffer.is_dirty());
6186 assert_eq!(
6187 *events.borrow(),
6188 &[
6189 language::Event::Edited,
6190 language::Event::Dirtied,
6191 language::Event::Edited,
6192 ],
6193 );
6194 events.borrow_mut().clear();
6195
6196 // TODO - currently, after restoring the buffer to its
6197 // previously-saved state, the is still considered dirty.
6198 buffer.edit([1..3], "", cx);
6199 assert!(buffer.text() == "ac");
6200 assert!(buffer.is_dirty());
6201 });
6202
6203 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6204
6205 // When a file is deleted, the buffer is considered dirty.
6206 let events = Rc::new(RefCell::new(Vec::new()));
6207 let buffer2 = project
6208 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6209 .await
6210 .unwrap();
6211 buffer2.update(cx, |_, cx| {
6212 cx.subscribe(&buffer2, {
6213 let events = events.clone();
6214 move |_, _, event, _| events.borrow_mut().push(event.clone())
6215 })
6216 .detach();
6217 });
6218
6219 fs::remove_file(dir.path().join("file2")).unwrap();
6220 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6221 assert_eq!(
6222 *events.borrow(),
6223 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6224 );
6225
6226 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6227 let events = Rc::new(RefCell::new(Vec::new()));
6228 let buffer3 = project
6229 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6230 .await
6231 .unwrap();
6232 buffer3.update(cx, |_, cx| {
6233 cx.subscribe(&buffer3, {
6234 let events = events.clone();
6235 move |_, _, event, _| events.borrow_mut().push(event.clone())
6236 })
6237 .detach();
6238 });
6239
6240 worktree.flush_fs_events(&cx).await;
6241 buffer3.update(cx, |buffer, cx| {
6242 buffer.edit(Some(0..0), "x", cx);
6243 });
6244 events.borrow_mut().clear();
6245 fs::remove_file(dir.path().join("file3")).unwrap();
6246 buffer3
6247 .condition(&cx, |_, _| !events.borrow().is_empty())
6248 .await;
6249 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6250 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6251 }
6252
6253 #[gpui::test]
6254 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6255 use std::fs;
6256
6257 let initial_contents = "aaa\nbbbbb\nc\n";
6258 let dir = temp_tree(json!({ "the-file": initial_contents }));
6259
6260 let project = Project::test(Arc::new(RealFs), cx);
6261 let (worktree, _) = project
6262 .update(cx, |p, cx| {
6263 p.find_or_create_local_worktree(dir.path(), true, cx)
6264 })
6265 .await
6266 .unwrap();
6267 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6268
6269 worktree
6270 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6271 .await;
6272
6273 let abs_path = dir.path().join("the-file");
6274 let buffer = project
6275 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6276 .await
6277 .unwrap();
6278
6279 // TODO
6280 // Add a cursor on each row.
6281 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6282 // assert!(!buffer.is_dirty());
6283 // buffer.add_selection_set(
6284 // &(0..3)
6285 // .map(|row| Selection {
6286 // id: row as usize,
6287 // start: Point::new(row, 1),
6288 // end: Point::new(row, 1),
6289 // reversed: false,
6290 // goal: SelectionGoal::None,
6291 // })
6292 // .collect::<Vec<_>>(),
6293 // cx,
6294 // )
6295 // });
6296
6297 // Change the file on disk, adding two new lines of text, and removing
6298 // one line.
6299 buffer.read_with(cx, |buffer, _| {
6300 assert!(!buffer.is_dirty());
6301 assert!(!buffer.has_conflict());
6302 });
6303 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6304 fs::write(&abs_path, new_contents).unwrap();
6305
6306 // Because the buffer was not modified, it is reloaded from disk. Its
6307 // contents are edited according to the diff between the old and new
6308 // file contents.
6309 buffer
6310 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6311 .await;
6312
6313 buffer.update(cx, |buffer, _| {
6314 assert_eq!(buffer.text(), new_contents);
6315 assert!(!buffer.is_dirty());
6316 assert!(!buffer.has_conflict());
6317
6318 // TODO
6319 // let cursor_positions = buffer
6320 // .selection_set(selection_set_id)
6321 // .unwrap()
6322 // .selections::<Point>(&*buffer)
6323 // .map(|selection| {
6324 // assert_eq!(selection.start, selection.end);
6325 // selection.start
6326 // })
6327 // .collect::<Vec<_>>();
6328 // assert_eq!(
6329 // cursor_positions,
6330 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6331 // );
6332 });
6333
6334 // Modify the buffer
6335 buffer.update(cx, |buffer, cx| {
6336 buffer.edit(vec![0..0], " ", cx);
6337 assert!(buffer.is_dirty());
6338 assert!(!buffer.has_conflict());
6339 });
6340
6341 // Change the file on disk again, adding blank lines to the beginning.
6342 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6343
6344 // Because the buffer is modified, it doesn't reload from disk, but is
6345 // marked as having a conflict.
6346 buffer
6347 .condition(&cx, |buffer, _| buffer.has_conflict())
6348 .await;
6349 }
6350
6351 #[gpui::test]
6352 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6353 cx.foreground().forbid_parking();
6354
6355 let fs = FakeFs::new(cx.background());
6356 fs.insert_tree(
6357 "/the-dir",
6358 json!({
6359 "a.rs": "
6360 fn foo(mut v: Vec<usize>) {
6361 for x in &v {
6362 v.push(1);
6363 }
6364 }
6365 "
6366 .unindent(),
6367 }),
6368 )
6369 .await;
6370
6371 let project = Project::test(fs.clone(), cx);
6372 let (worktree, _) = project
6373 .update(cx, |p, cx| {
6374 p.find_or_create_local_worktree("/the-dir", true, cx)
6375 })
6376 .await
6377 .unwrap();
6378 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6379
6380 let buffer = project
6381 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6382 .await
6383 .unwrap();
6384
6385 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6386 let message = lsp::PublishDiagnosticsParams {
6387 uri: buffer_uri.clone(),
6388 diagnostics: vec![
6389 lsp::Diagnostic {
6390 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6391 severity: Some(DiagnosticSeverity::WARNING),
6392 message: "error 1".to_string(),
6393 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6394 location: lsp::Location {
6395 uri: buffer_uri.clone(),
6396 range: lsp::Range::new(
6397 lsp::Position::new(1, 8),
6398 lsp::Position::new(1, 9),
6399 ),
6400 },
6401 message: "error 1 hint 1".to_string(),
6402 }]),
6403 ..Default::default()
6404 },
6405 lsp::Diagnostic {
6406 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6407 severity: Some(DiagnosticSeverity::HINT),
6408 message: "error 1 hint 1".to_string(),
6409 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6410 location: lsp::Location {
6411 uri: buffer_uri.clone(),
6412 range: lsp::Range::new(
6413 lsp::Position::new(1, 8),
6414 lsp::Position::new(1, 9),
6415 ),
6416 },
6417 message: "original diagnostic".to_string(),
6418 }]),
6419 ..Default::default()
6420 },
6421 lsp::Diagnostic {
6422 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6423 severity: Some(DiagnosticSeverity::ERROR),
6424 message: "error 2".to_string(),
6425 related_information: Some(vec![
6426 lsp::DiagnosticRelatedInformation {
6427 location: lsp::Location {
6428 uri: buffer_uri.clone(),
6429 range: lsp::Range::new(
6430 lsp::Position::new(1, 13),
6431 lsp::Position::new(1, 15),
6432 ),
6433 },
6434 message: "error 2 hint 1".to_string(),
6435 },
6436 lsp::DiagnosticRelatedInformation {
6437 location: lsp::Location {
6438 uri: buffer_uri.clone(),
6439 range: lsp::Range::new(
6440 lsp::Position::new(1, 13),
6441 lsp::Position::new(1, 15),
6442 ),
6443 },
6444 message: "error 2 hint 2".to_string(),
6445 },
6446 ]),
6447 ..Default::default()
6448 },
6449 lsp::Diagnostic {
6450 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6451 severity: Some(DiagnosticSeverity::HINT),
6452 message: "error 2 hint 1".to_string(),
6453 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6454 location: lsp::Location {
6455 uri: buffer_uri.clone(),
6456 range: lsp::Range::new(
6457 lsp::Position::new(2, 8),
6458 lsp::Position::new(2, 17),
6459 ),
6460 },
6461 message: "original diagnostic".to_string(),
6462 }]),
6463 ..Default::default()
6464 },
6465 lsp::Diagnostic {
6466 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6467 severity: Some(DiagnosticSeverity::HINT),
6468 message: "error 2 hint 2".to_string(),
6469 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6470 location: lsp::Location {
6471 uri: buffer_uri.clone(),
6472 range: lsp::Range::new(
6473 lsp::Position::new(2, 8),
6474 lsp::Position::new(2, 17),
6475 ),
6476 },
6477 message: "original diagnostic".to_string(),
6478 }]),
6479 ..Default::default()
6480 },
6481 ],
6482 version: None,
6483 };
6484
6485 project
6486 .update(cx, |p, cx| {
6487 p.update_diagnostics(message, &Default::default(), cx)
6488 })
6489 .unwrap();
6490 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6491
6492 assert_eq!(
6493 buffer
6494 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6495 .collect::<Vec<_>>(),
6496 &[
6497 DiagnosticEntry {
6498 range: Point::new(1, 8)..Point::new(1, 9),
6499 diagnostic: Diagnostic {
6500 severity: DiagnosticSeverity::WARNING,
6501 message: "error 1".to_string(),
6502 group_id: 0,
6503 is_primary: true,
6504 ..Default::default()
6505 }
6506 },
6507 DiagnosticEntry {
6508 range: Point::new(1, 8)..Point::new(1, 9),
6509 diagnostic: Diagnostic {
6510 severity: DiagnosticSeverity::HINT,
6511 message: "error 1 hint 1".to_string(),
6512 group_id: 0,
6513 is_primary: false,
6514 ..Default::default()
6515 }
6516 },
6517 DiagnosticEntry {
6518 range: Point::new(1, 13)..Point::new(1, 15),
6519 diagnostic: Diagnostic {
6520 severity: DiagnosticSeverity::HINT,
6521 message: "error 2 hint 1".to_string(),
6522 group_id: 1,
6523 is_primary: false,
6524 ..Default::default()
6525 }
6526 },
6527 DiagnosticEntry {
6528 range: Point::new(1, 13)..Point::new(1, 15),
6529 diagnostic: Diagnostic {
6530 severity: DiagnosticSeverity::HINT,
6531 message: "error 2 hint 2".to_string(),
6532 group_id: 1,
6533 is_primary: false,
6534 ..Default::default()
6535 }
6536 },
6537 DiagnosticEntry {
6538 range: Point::new(2, 8)..Point::new(2, 17),
6539 diagnostic: Diagnostic {
6540 severity: DiagnosticSeverity::ERROR,
6541 message: "error 2".to_string(),
6542 group_id: 1,
6543 is_primary: true,
6544 ..Default::default()
6545 }
6546 }
6547 ]
6548 );
6549
6550 assert_eq!(
6551 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6552 &[
6553 DiagnosticEntry {
6554 range: Point::new(1, 8)..Point::new(1, 9),
6555 diagnostic: Diagnostic {
6556 severity: DiagnosticSeverity::WARNING,
6557 message: "error 1".to_string(),
6558 group_id: 0,
6559 is_primary: true,
6560 ..Default::default()
6561 }
6562 },
6563 DiagnosticEntry {
6564 range: Point::new(1, 8)..Point::new(1, 9),
6565 diagnostic: Diagnostic {
6566 severity: DiagnosticSeverity::HINT,
6567 message: "error 1 hint 1".to_string(),
6568 group_id: 0,
6569 is_primary: false,
6570 ..Default::default()
6571 }
6572 },
6573 ]
6574 );
6575 assert_eq!(
6576 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6577 &[
6578 DiagnosticEntry {
6579 range: Point::new(1, 13)..Point::new(1, 15),
6580 diagnostic: Diagnostic {
6581 severity: DiagnosticSeverity::HINT,
6582 message: "error 2 hint 1".to_string(),
6583 group_id: 1,
6584 is_primary: false,
6585 ..Default::default()
6586 }
6587 },
6588 DiagnosticEntry {
6589 range: Point::new(1, 13)..Point::new(1, 15),
6590 diagnostic: Diagnostic {
6591 severity: DiagnosticSeverity::HINT,
6592 message: "error 2 hint 2".to_string(),
6593 group_id: 1,
6594 is_primary: false,
6595 ..Default::default()
6596 }
6597 },
6598 DiagnosticEntry {
6599 range: Point::new(2, 8)..Point::new(2, 17),
6600 diagnostic: Diagnostic {
6601 severity: DiagnosticSeverity::ERROR,
6602 message: "error 2".to_string(),
6603 group_id: 1,
6604 is_primary: true,
6605 ..Default::default()
6606 }
6607 }
6608 ]
6609 );
6610 }
6611
6612 #[gpui::test]
6613 async fn test_rename(cx: &mut gpui::TestAppContext) {
6614 cx.foreground().forbid_parking();
6615
6616 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6617 let language = Arc::new(Language::new(
6618 LanguageConfig {
6619 name: "Rust".into(),
6620 path_suffixes: vec!["rs".to_string()],
6621 language_server: Some(language_server_config),
6622 ..Default::default()
6623 },
6624 Some(tree_sitter_rust::language()),
6625 ));
6626
6627 let fs = FakeFs::new(cx.background());
6628 fs.insert_tree(
6629 "/dir",
6630 json!({
6631 "one.rs": "const ONE: usize = 1;",
6632 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6633 }),
6634 )
6635 .await;
6636
6637 let project = Project::test(fs.clone(), cx);
6638 project.update(cx, |project, _| {
6639 Arc::get_mut(&mut project.languages).unwrap().add(language);
6640 });
6641
6642 let (tree, _) = project
6643 .update(cx, |project, cx| {
6644 project.find_or_create_local_worktree("/dir", true, cx)
6645 })
6646 .await
6647 .unwrap();
6648 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6649 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6650 .await;
6651
6652 let buffer = project
6653 .update(cx, |project, cx| {
6654 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6655 })
6656 .await
6657 .unwrap();
6658
6659 let mut fake_server = fake_servers.next().await.unwrap();
6660
6661 let response = project.update(cx, |project, cx| {
6662 project.prepare_rename(buffer.clone(), 7, cx)
6663 });
6664 fake_server
6665 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6666 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6667 assert_eq!(params.position, lsp::Position::new(0, 7));
6668 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6669 lsp::Position::new(0, 6),
6670 lsp::Position::new(0, 9),
6671 )))
6672 })
6673 .next()
6674 .await
6675 .unwrap();
6676 let range = response.await.unwrap().unwrap();
6677 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6678 assert_eq!(range, 6..9);
6679
6680 let response = project.update(cx, |project, cx| {
6681 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6682 });
6683 fake_server
6684 .handle_request::<lsp::request::Rename, _>(|params, _| {
6685 assert_eq!(
6686 params.text_document_position.text_document.uri.as_str(),
6687 "file:///dir/one.rs"
6688 );
6689 assert_eq!(
6690 params.text_document_position.position,
6691 lsp::Position::new(0, 7)
6692 );
6693 assert_eq!(params.new_name, "THREE");
6694 Some(lsp::WorkspaceEdit {
6695 changes: Some(
6696 [
6697 (
6698 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6699 vec![lsp::TextEdit::new(
6700 lsp::Range::new(
6701 lsp::Position::new(0, 6),
6702 lsp::Position::new(0, 9),
6703 ),
6704 "THREE".to_string(),
6705 )],
6706 ),
6707 (
6708 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6709 vec![
6710 lsp::TextEdit::new(
6711 lsp::Range::new(
6712 lsp::Position::new(0, 24),
6713 lsp::Position::new(0, 27),
6714 ),
6715 "THREE".to_string(),
6716 ),
6717 lsp::TextEdit::new(
6718 lsp::Range::new(
6719 lsp::Position::new(0, 35),
6720 lsp::Position::new(0, 38),
6721 ),
6722 "THREE".to_string(),
6723 ),
6724 ],
6725 ),
6726 ]
6727 .into_iter()
6728 .collect(),
6729 ),
6730 ..Default::default()
6731 })
6732 })
6733 .next()
6734 .await
6735 .unwrap();
6736 let mut transaction = response.await.unwrap().0;
6737 assert_eq!(transaction.len(), 2);
6738 assert_eq!(
6739 transaction
6740 .remove_entry(&buffer)
6741 .unwrap()
6742 .0
6743 .read_with(cx, |buffer, _| buffer.text()),
6744 "const THREE: usize = 1;"
6745 );
6746 assert_eq!(
6747 transaction
6748 .into_keys()
6749 .next()
6750 .unwrap()
6751 .read_with(cx, |buffer, _| buffer.text()),
6752 "const TWO: usize = one::THREE + one::THREE;"
6753 );
6754 }
6755
6756 #[gpui::test]
6757 async fn test_search(cx: &mut gpui::TestAppContext) {
6758 let fs = FakeFs::new(cx.background());
6759 fs.insert_tree(
6760 "/dir",
6761 json!({
6762 "one.rs": "const ONE: usize = 1;",
6763 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6764 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6765 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6766 }),
6767 )
6768 .await;
6769 let project = Project::test(fs.clone(), cx);
6770 let (tree, _) = project
6771 .update(cx, |project, cx| {
6772 project.find_or_create_local_worktree("/dir", true, cx)
6773 })
6774 .await
6775 .unwrap();
6776 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6777 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6778 .await;
6779
6780 assert_eq!(
6781 search(&project, SearchQuery::text("TWO", false, true), cx)
6782 .await
6783 .unwrap(),
6784 HashMap::from_iter([
6785 ("two.rs".to_string(), vec![6..9]),
6786 ("three.rs".to_string(), vec![37..40])
6787 ])
6788 );
6789
6790 let buffer_4 = project
6791 .update(cx, |project, cx| {
6792 project.open_buffer((worktree_id, "four.rs"), cx)
6793 })
6794 .await
6795 .unwrap();
6796 buffer_4.update(cx, |buffer, cx| {
6797 buffer.edit([20..28, 31..43], "two::TWO", cx);
6798 });
6799
6800 assert_eq!(
6801 search(&project, SearchQuery::text("TWO", false, true), cx)
6802 .await
6803 .unwrap(),
6804 HashMap::from_iter([
6805 ("two.rs".to_string(), vec![6..9]),
6806 ("three.rs".to_string(), vec![37..40]),
6807 ("four.rs".to_string(), vec![25..28, 36..39])
6808 ])
6809 );
6810
6811 async fn search(
6812 project: &ModelHandle<Project>,
6813 query: SearchQuery,
6814 cx: &mut gpui::TestAppContext,
6815 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6816 let results = project
6817 .update(cx, |project, cx| project.search(query, cx))
6818 .await?;
6819
6820 Ok(results
6821 .into_iter()
6822 .map(|(buffer, ranges)| {
6823 buffer.read_with(cx, |buffer, _| {
6824 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6825 let ranges = ranges
6826 .into_iter()
6827 .map(|range| range.to_offset(buffer))
6828 .collect::<Vec<_>>();
6829 (path, ranges)
6830 })
6831 })
6832 .collect())
6833 }
6834 }
6835}