1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToLspPosition,
22 ToOffset, ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
/// A model that may be backed by an entry in one of the project's worktrees.
pub trait Item: Entity {
    /// The project entry this item corresponds to, if it has one.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
55
/// The central model of this crate: a set of worktrees together with the
/// buffers, collaborators, and language-server state associated with them.
/// A project is either hosted locally or joined as a guest; see
/// `ProjectClientState`.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The entry currently considered active (see `Event::ActiveEntryChanged`).
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and language name.
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    // Status per language server, keyed by server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter used by `ProjectEntryId::new`.
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    // Channel used to signal whenever a buffer has been opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Per-peer sets of buffer ids; cleared on unshare. Presumably the buffers
    // already sent to each guest — confirm against the request handlers.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Paths currently being loaded, so concurrent `open_buffer` calls for the
    // same path share a single load task.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Local worktrees currently being created, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by their remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random per-instance value (seeded from entropy in `local`/`remote`).
    nonce: u128,
}
85
/// How the project retains an open buffer.
enum OpenBuffer {
    // Kept alive by the project (used while the project is remote or shared).
    Strong(ModelHandle<Buffer>),
    // Dropped once nothing else references the buffer.
    Weak(WeakModelHandle<Buffer>),
    // Operations buffered for a buffer whose model does not exist yet; they
    // are applied when the buffer is registered (see `register_buffer`).
    Loading(Vec<Operation>),
}
91
/// How the project retains a worktree: strong while it must stay alive
/// (e.g. while shared), weak otherwise. See `share`/`unshare`.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
96
/// Distinguishes a project hosted by this instance from one joined remotely.
enum ProjectClientState {
    // This instance is the host.
    Local {
        is_shared: bool,
        // Publishes the server-assigned project id (`None` while unregistered).
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Registers/unregisters the project as the client (dis)connects.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    // This instance joined another host's project as a guest.
    Remote {
        // Set when the host stops sharing; makes the project read-only
        // (see `is_read_only`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}
111
/// Another user participating in this project, identified by their peer
/// connection and CRDT replica id.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
118
/// Events emitted by a `Project` for observers (via `cx.emit`).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    // Emitted when the server (re)assigns or clears this project's id.
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
}
130
/// Internal events surfaced from a language server, mirroring the LSP
/// `$/progress` work-done notifications plus diagnostics publication.
enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}
144
/// Status of one running language server, keyed in
/// `Project::language_server_statuses` by server id.
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work, keyed by LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pending_diagnostic_updates: isize,
}
150
/// A snapshot of one unit of language-server work-done progress.
#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    // Completion percentage, when the server reports one.
    pub percentage: Option<usize>,
    pub last_update_at: Instant,
}
157
/// A path to an entry, expressed relative to a particular worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
163
/// Counts of primary diagnostics by severity; built by
/// `DiagnosticSummary::new`.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
171
/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
177
/// A highlighted range in a document, as returned by the LSP
/// `textDocument/documentHighlight` request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
183
/// A project-wide symbol (e.g. from workspace/symbol), locatable by worktree
/// and path.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    // Worktree containing the file the symbol lives in.
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte (Sha256-sized) signature — presumably used to validate the
    // symbol when resolving it across peers; confirm against the handlers.
    pub signature: [u8; 32],
}
196
/// A set of buffer transactions, keyed by the buffer they apply to —
/// the result of a project-wide edit such as a rename or code action.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
199
200impl DiagnosticSummary {
201 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
202 let mut this = Self {
203 error_count: 0,
204 warning_count: 0,
205 info_count: 0,
206 hint_count: 0,
207 };
208
209 for entry in diagnostics {
210 if entry.diagnostic.is_primary {
211 match entry.diagnostic.severity {
212 DiagnosticSeverity::ERROR => this.error_count += 1,
213 DiagnosticSeverity::WARNING => this.warning_count += 1,
214 DiagnosticSeverity::INFORMATION => this.info_count += 1,
215 DiagnosticSeverity::HINT => this.hint_count += 1,
216 _ => {}
217 }
218 }
219 }
220
221 this
222 }
223
224 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
225 proto::DiagnosticSummary {
226 path: path.to_string_lossy().to_string(),
227 error_count: self.error_count as u32,
228 warning_count: self.warning_count as u32,
229 info_count: self.info_count as u32,
230 hint_count: self.hint_count as u32,
231 }
232 }
233}
234
/// A unique identifier for an entry (file or directory) within a project.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Allocates the next unique id by atomically incrementing `counter`.
    pub fn new(counter: &AtomicUsize) -> Self {
        let value = counter.fetch_add(1, SeqCst);
        ProjectEntryId(value)
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts this id into its wire representation.
    pub fn to_proto(&self) -> u64 {
        let ProjectEntryId(value) = self;
        *value as u64
    }

    /// Returns the raw integer value of this id.
    pub fn to_usize(&self) -> usize {
        let ProjectEntryId(value) = self;
        *value
    }
}
255
256impl Project {
    /// Registers every RPC handler that a `Project` model responds to.
    /// Called once, before any project is created.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget message handlers.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, instantiated per type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
289
    /// Creates a local (host-side) project backed by the given filesystem.
    ///
    /// Also spawns a background task that registers the project with the
    /// server whenever the client connects, and unregisters it whenever the
    /// connection is lost.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Track the client's connection status for the lifetime of the
            // project; the task holds only a weak handle so it won't keep the
            // project alive.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
352
    /// Joins an existing project hosted by another peer.
    ///
    /// Authenticates and connects, asks the server to join `remote_id`,
    /// builds remote worktrees and language-server statuses from the
    /// response, then loads the collaborators' user records before
    /// returning the new model.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Create a model for each worktree in the response; their load tasks
        // run detached in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection status and mark the project
                    // unshared if we ever disconnect.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch the user records for all collaborators before constructing
        // the `Collaborator` values.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
464
    /// Builds a local project wired to fake networking (a 404-only HTTP
    /// client) for use in tests.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
473
474 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
475 self.opened_buffers
476 .get(&remote_id)
477 .and_then(|buffer| buffer.upgrade(cx))
478 }
479
    /// Test-only accessor for the project's language registry.
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
484
    /// Test-only sanity checks. On the host: every worktree's absolute path
    /// must be unique. On a guest: no open buffer may have deferred
    /// (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
518
519 #[cfg(any(test, feature = "test-support"))]
520 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
521 let path = path.into();
522 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
523 self.opened_buffers.iter().any(|(_, buffer)| {
524 if let Some(buffer) = buffer.upgrade(cx) {
525 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
526 if file.worktree == worktree && file.path() == &path.path {
527 return true;
528 }
529 }
530 }
531 false
532 })
533 } else {
534 false
535 }
536 }
537
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
541
    /// Undoes `register`: stops sharing, unregisters every live local
    /// worktree, clears the published remote id, and drops all RPC
    /// subscriptions.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) {
        self.unshare(cx);
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                worktree.update(cx, |worktree, _| {
                    worktree.as_local_mut().unwrap().unregister();
                });
            }
        }

        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = None;
        }

        self.subscriptions.clear();
    }
558
    /// Registers this local project with the server: requests a project id,
    /// publishes it, subscribes to RPC messages for it, and registers every
    /// live worktree under it. Any previous registration is torn down first.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unregister(cx);

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;

            let mut registrations = Vec::new();
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                cx.emit(Event::RemoteIdChanged(Some(remote_id)));

                this.subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));

                for worktree in &this.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        registrations.push(worktree.update(cx, |worktree, cx| {
                            let worktree = worktree.as_local_mut().unwrap();
                            worktree.register(remote_id, cx)
                        }));
                    }
                }
            });

            // All worktree registrations run concurrently; fail on the first error.
            futures::future::try_join_all(registrations).await?;
            Ok(())
        })
    }
591
    /// The server-assigned project id, or `None` for a local project that
    /// hasn't been registered yet.
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
598
    /// Resolves to this project's remote id. For a remote project it is
    /// available immediately; for a local project the future waits on the
    /// id watch until registration assigns one.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local case: poll the watch until it carries `Some(id)`.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
621
    /// This instance's CRDT replica id. The host is always replica 0;
    /// guests use the id assigned when they joined.
    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }
628
    /// The current collaborators on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
632
    /// Iterates over all worktrees that are still alive; weak handles whose
    /// model has been dropped are skipped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
641
642 pub fn visible_worktrees<'a>(
643 &'a self,
644 cx: &'a AppContext,
645 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
646 self.worktrees.iter().filter_map(|worktree| {
647 worktree.upgrade(cx).and_then(|worktree| {
648 if worktree.read(cx).is_visible() {
649 Some(worktree)
650 } else {
651 None
652 }
653 })
654 })
655 }
656
    /// Finds the live worktree with the given id, if any.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }
665
    /// Finds the live worktree that contains the given entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }
674
    /// The id of the worktree containing the given entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
683
    /// Starts sharing this local project with collaborators.
    ///
    /// Upgrades all weak buffer and worktree handles to strong ones (so they
    /// stay alive while shared), announces the share to the server, and then
    /// shares every worktree. Fails if the project is remote or has no
    /// remote id yet.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Retain all live buffers for the duration of the share.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // `Loading` only exists on guests, never on the
                            // sharing host.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise retain all live worktrees.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share each worktree; collect the tasks first, then await them
            // outside the model update.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
745
746 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
747 let rpc = self.client.clone();
748
749 if let ProjectClientState::Local {
750 is_shared,
751 remote_id_rx,
752 ..
753 } = &mut self.client_state
754 {
755 if !*is_shared {
756 return;
757 }
758
759 *is_shared = false;
760 self.collaborators.clear();
761 self.shared_buffers.clear();
762 for worktree_handle in self.worktrees.iter_mut() {
763 if let WorktreeHandle::Strong(worktree) = worktree_handle {
764 let is_visible = worktree.update(cx, |worktree, _| {
765 worktree.as_local_mut().unwrap().unshare();
766 worktree.is_visible()
767 });
768 if !is_visible {
769 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
770 }
771 }
772 }
773
774 for open_buffer in self.opened_buffers.values_mut() {
775 match open_buffer {
776 OpenBuffer::Strong(buffer) => {
777 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
778 }
779 _ => {}
780 }
781 }
782
783 if let Some(project_id) = *remote_id_rx.borrow() {
784 rpc.send(proto::UnshareProject { project_id }).log_err();
785 }
786
787 cx.notify();
788 } else {
789 log::error!("attempted to unshare a remote project");
790 }
791 }
792
    /// Guest-side handler for the host stopping the share (or the connection
    /// dropping): marks the project as no longer shared — which makes it
    /// read-only via `is_read_only` — and clears the collaborators.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
804
805 pub fn is_read_only(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => false,
808 ProjectClientState::Remote {
809 sharing_has_stopped,
810 ..
811 } => *sharing_has_stopped,
812 }
813 }
814
815 pub fn is_local(&self) -> bool {
816 match &self.client_state {
817 ProjectClientState::Local { .. } => true,
818 ProjectClientState::Remote { .. } => false,
819 }
820 }
821
    /// Whether this instance joined the project as a guest.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
825
    /// Creates a new, empty plain-text buffer and registers it with the
    /// project. Only supported on the host.
    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
837
    /// Opens the buffer at `path` and returns it as a type-erased item
    /// handle together with its project entry id. Fails if the opened
    /// buffer has no backing project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
854
    /// Opens (or returns the already-open) buffer for the given project path.
    ///
    /// Concurrent calls for the same path are deduplicated through
    /// `loading_buffers`: only the first call starts a load (local or
    /// remote, depending on the worktree); the rest wait on a shared watch
    /// for the result.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                // Publish the load result (success or error) to all waiters.
                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch carries a result.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
915
    /// Loads a buffer from disk via the local worktree, then registers it
    /// with the project.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
932
    /// Requests a buffer from the host by path and deserializes the
    /// response into a local buffer model.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // NOTE(review): unwrap assumes a remote project always has an id —
        // true for `ProjectClientState::Remote`, whose id is mandatory.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
957
    /// Opens the buffer for a file URI produced by a language server.
    ///
    /// If the path lies outside every existing worktree, a new invisible
    /// worktree is created for it and the originating language server is
    /// associated with that worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // Create an invisible (`false`) worktree rooted at the path
                // itself; the file is then at the worktree root.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
994
    /// Opens a buffer given only its remote id: returns it directly if
    /// already open, requests it from the host on a guest, and errors on a
    /// host (where an unknown id means the buffer doesn't exist) or while
    /// disconnected.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1020
    /// Saves `buffer` to a new absolute path ("save as"), creating or
    /// reusing a local worktree containing that path, then re-detects the
    /// buffer's language and re-registers it with the matching language
    /// server.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file (and possibly extension) changed, so re-run language
            // detection and language-server registration.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1045
1046 pub fn get_open_buffer(
1047 &mut self,
1048 path: &ProjectPath,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<ModelHandle<Buffer>> {
1051 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1052 self.opened_buffers.values().find_map(|buffer| {
1053 let buffer = buffer.upgrade(cx)?;
1054 let file = File::from_dyn(buffer.read(cx).file())?;
1055 if file.worktree == worktree && file.path() == &path.path {
1056 Some(buffer)
1057 } else {
1058 None
1059 }
1060 })
1061 }
1062
    /// Records a newly opened buffer in `opened_buffers`, wires up event
    /// handling, and assigns its language / language server.
    ///
    /// The buffer is held strongly when remote or shared, weakly otherwise.
    /// If operations arrived for this buffer before it existed
    /// (`OpenBuffer::Loading`), they are applied now. Registering a buffer
    /// whose id is already live is an error.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                // Apply any operations buffered while the load was in flight.
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // A dead weak handle is fine to replace; a live one is a
                // duplicate registration.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
1103
1104 fn register_buffer_with_language_server(
1105 &mut self,
1106 buffer_handle: &ModelHandle<Buffer>,
1107 cx: &mut ModelContext<Self>,
1108 ) {
1109 let buffer = buffer_handle.read(cx);
1110 let buffer_id = buffer.remote_id();
1111 if let Some(file) = File::from_dyn(buffer.file()) {
1112 if file.is_local() {
1113 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1114 let initial_snapshot = buffer.text_snapshot();
1115 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1116
1117 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1118 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1119 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1120 .log_err();
1121 }
1122 }
1123
1124 if let Some(server) = language_server {
1125 server
1126 .notify::<lsp::notification::DidOpenTextDocument>(
1127 lsp::DidOpenTextDocumentParams {
1128 text_document: lsp::TextDocumentItem::new(
1129 uri,
1130 Default::default(),
1131 0,
1132 initial_snapshot.text(),
1133 ),
1134 }
1135 .clone(),
1136 )
1137 .log_err();
1138 buffer_handle.update(cx, |buffer, cx| {
1139 buffer.set_completion_triggers(
1140 server
1141 .capabilities()
1142 .completion_provider
1143 .as_ref()
1144 .and_then(|provider| provider.trigger_characters.clone())
1145 .unwrap_or(Vec::new()),
1146 cx,
1147 )
1148 });
1149 self.buffer_snapshots
1150 .insert(buffer_id, vec![(0, initial_snapshot)]);
1151 }
1152
1153 cx.observe_release(buffer_handle, |this, buffer, cx| {
1154 if let Some(file) = File::from_dyn(buffer.file()) {
1155 if file.is_local() {
1156 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1157 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1158 server
1159 .notify::<lsp::notification::DidCloseTextDocument>(
1160 lsp::DidCloseTextDocumentParams {
1161 text_document: lsp::TextDocumentIdentifier::new(
1162 uri.clone(),
1163 ),
1164 },
1165 )
1166 .log_err();
1167 }
1168 }
1169 }
1170 })
1171 .detach();
1172 }
1173 }
1174 }
1175
    /// Dispatches buffer events: forwards local operations to collaborators,
    /// sends incremental `didChange` notifications to the language server on
    /// edits, and sends `didSave` notifications on save.
    ///
    /// Returns `None` when an event is ignored (e.g. no remote id, no language
    /// server, or no file); the return value is only used for `?`-style
    /// early exits.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Only relay operations when this project is shared/remote.
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited { .. } => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Build LSP content changes relative to the previously sent
                // snapshot. Per the LSP spec, each change's range is expressed
                // in the coordinates of the document *before* that change, so
                // the old range's extent is offset from the new start.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot we just described so the next edit can
                // diff against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                // Notify every server in the worktree, not just the buffer's
                // own language server.
                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1262
1263 fn language_servers_for_worktree(
1264 &self,
1265 worktree_id: WorktreeId,
1266 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1267 self.language_servers.iter().filter_map(
1268 move |((language_server_worktree_id, language_name), server)| {
1269 if *language_server_worktree_id == worktree_id {
1270 Some((language_name.as_ref(), server))
1271 } else {
1272 None
1273 }
1274 },
1275 )
1276 }
1277
1278 fn assign_language_to_buffer(
1279 &mut self,
1280 buffer: &ModelHandle<Buffer>,
1281 cx: &mut ModelContext<Self>,
1282 ) -> Option<()> {
1283 // If the buffer has a language, set it and start the language server if we haven't already.
1284 let full_path = buffer.read(cx).file()?.full_path(cx);
1285 let language = self.languages.select_language(&full_path)?;
1286 buffer.update(cx, |buffer, cx| {
1287 buffer.set_language(Some(language.clone()), cx);
1288 });
1289
1290 let file = File::from_dyn(buffer.read(cx).file())?;
1291 let worktree = file.worktree.read(cx).as_local()?;
1292 let worktree_id = worktree.id();
1293 let worktree_abs_path = worktree.abs_path().clone();
1294 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1295
1296 None
1297 }
1298
    /// Ensures a language server is running for `language` in `worktree_id`.
    ///
    /// Idempotent: the `(worktree, language name)` key guards against starting
    /// a second server for the same pair. The spawned task boots the server,
    /// funnels its notifications through a channel into `on_lsp_event`,
    /// initializes it, and then announces every already-open matching buffer
    /// via `textDocument/didOpen`.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let key = (worktree_id, language.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    server_id,
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    // All server notifications are serialized through this
                    // channel and handled on the main thread below.
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // project's stored language server settings.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params| {
                                let settings = settings.lock();
                                Ok(params
                                    .items
                                    .into_iter()
                                    .map(|item| {
                                        // A sectioned request gets that section
                                        // (or null); otherwise the whole blob.
                                        if let Some(section) = &item.section {
                                            settings
                                                .get(section)
                                                .cloned()
                                                .unwrap_or(serde_json::Value::Null)
                                        } else {
                                            settings.clone()
                                        }
                                    })
                                    .collect())
                            }
                        })
                        .detach();

                    // Translate `$/progress` notifications into work start /
                    // progress / end events.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    // Only string tokens are supported.
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                    last_update_at: Instant::now(),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });

                                // Don't starve the main thread when lots of events arrive all at once.
                                smol::future::yield_now().await;
                            }
                            Some(())
                        }
                    })
                    .detach();

                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings immediately; future changes
                        // arrive via `set_language_server_settings`.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Reuse an existing snapshot history if the
                                // buffer already has one; otherwise seed it at
                                // version 0.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1511
1512 pub fn restart_language_servers_for_buffers(
1513 &mut self,
1514 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1515 cx: &mut ModelContext<Self>,
1516 ) -> Option<()> {
1517 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1518 .into_iter()
1519 .filter_map(|buffer| {
1520 let file = File::from_dyn(buffer.read(cx).file())?;
1521 let worktree = file.worktree.read(cx).as_local()?;
1522 let worktree_id = worktree.id();
1523 let worktree_abs_path = worktree.abs_path().clone();
1524 let full_path = file.full_path(cx);
1525 Some((worktree_id, worktree_abs_path, full_path))
1526 })
1527 .collect();
1528 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1529 let language = self.languages.select_language(&full_path)?;
1530 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1531 }
1532
1533 None
1534 }
1535
1536 fn restart_language_server(
1537 &mut self,
1538 worktree_id: WorktreeId,
1539 worktree_path: Arc<Path>,
1540 language: Arc<Language>,
1541 cx: &mut ModelContext<Self>,
1542 ) {
1543 let key = (worktree_id, language.name());
1544 let server_to_shutdown = self.language_servers.remove(&key);
1545 self.started_language_servers.remove(&key);
1546 server_to_shutdown
1547 .as_ref()
1548 .map(|server| self.language_server_statuses.remove(&server.server_id()));
1549 cx.spawn_weak(|this, mut cx| async move {
1550 if let Some(this) = this.upgrade(&cx) {
1551 if let Some(server_to_shutdown) = server_to_shutdown {
1552 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1553 shutdown_task.await;
1554 }
1555 }
1556
1557 this.update(&mut cx, |this, cx| {
1558 this.start_language_server(worktree_id, worktree_path, language, cx);
1559 });
1560 }
1561 })
1562 .detach();
1563 }
1564
    /// Handles an event forwarded from a language server's notification
    /// channel, updating local status and broadcasting to collaborators.
    ///
    /// Work events whose token matches the language's disk-based-diagnostics
    /// progress token are treated specially: they are counted (so nested
    /// begin/end pairs collapse into a single started/finished transition)
    /// instead of being shown as ordinary progress.
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                // Server is unknown (e.g. already shut down); drop the event.
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(&token) == disk_diagnostics_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    // Only announce on the 0 -> 1 transition.
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                // Disk-diagnostics progress reports are intentionally ignored;
                // only begin/end matter for that token.
                if Some(&token) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(&token) == disk_diagnostics_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    // Only announce on the 1 -> 0 transition.
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                language.process_diagnostics(&mut params);

                // Languages with no progress token can't signal when their
                // disk-based diagnostics run; treat each publish as a
                // self-contained started/finished cycle.
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(
                    params,
                    language
                        .disk_based_diagnostic_sources()
                        .unwrap_or(&Default::default()),
                    cx,
                )
                .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }
1677
1678 fn on_lsp_work_start(
1679 &mut self,
1680 language_server_id: usize,
1681 token: String,
1682 cx: &mut ModelContext<Self>,
1683 ) {
1684 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1685 status.pending_work.insert(
1686 token,
1687 LanguageServerProgress {
1688 message: None,
1689 percentage: None,
1690 last_update_at: Instant::now(),
1691 },
1692 );
1693 cx.notify();
1694 }
1695 }
1696
1697 fn on_lsp_work_progress(
1698 &mut self,
1699 language_server_id: usize,
1700 token: String,
1701 progress: LanguageServerProgress,
1702 cx: &mut ModelContext<Self>,
1703 ) {
1704 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1705 status.pending_work.insert(token, progress);
1706 cx.notify();
1707 }
1708 }
1709
1710 fn on_lsp_work_end(
1711 &mut self,
1712 language_server_id: usize,
1713 token: String,
1714 cx: &mut ModelContext<Self>,
1715 ) {
1716 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1717 status.pending_work.remove(&token);
1718 cx.notify();
1719 }
1720 }
1721
1722 fn broadcast_language_server_update(
1723 &self,
1724 language_server_id: usize,
1725 event: proto::update_language_server::Variant,
1726 ) {
1727 if let Some(project_id) = self.remote_id() {
1728 self.client
1729 .send(proto::UpdateLanguageServer {
1730 project_id,
1731 language_server_id: language_server_id as u64,
1732 variant: Some(event),
1733 })
1734 .log_err();
1735 }
1736 }
1737
1738 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1739 for server in self.language_servers.values() {
1740 server
1741 .notify::<lsp::notification::DidChangeConfiguration>(
1742 lsp::DidChangeConfigurationParams {
1743 settings: settings.clone(),
1744 },
1745 )
1746 .ok();
1747 }
1748 *self.language_server_settings.lock() = settings;
1749 }
1750
    /// Iterates over the status of every known language server.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1756
    /// Converts an LSP `publishDiagnostics` payload into this project's
    /// diagnostic entries and applies them to the affected path.
    ///
    /// Two-pass algorithm: the first pass assigns a group id to each primary
    /// diagnostic and collects "supporting" diagnostics (those whose related
    /// information points back at an already-seen primary) keyed by
    /// `(source, code, range)`; the second pass merges the supporting
    /// severity/unnecessary flags into the non-primary entries of each group.
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            // Normalize numeric and string codes into one string form.
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            // A diagnostic is "supporting" if any of its related locations
            // matches a primary diagnostic we've already grouped.
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
                tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
            });

            if is_supporting {
                supporting_diagnostics.insert(
                    (source, code.clone(), range),
                    (diagnostic.severity, is_unnecessary),
                );
            } else {
                // Primary diagnostic: open a new group.
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                        is_unnecessary,
                    },
                });
                // Related locations in the same file become non-primary
                // members of this group.
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                    is_unnecessary: false,
                                },
                            });
                        }
                    }
                }
            }
        }

        // Second pass: upgrade non-primary entries with the severity and
        // unnecessary flag reported by their matching supporting diagnostic.
        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    if let Some(severity) = severity {
                        diagnostic.severity = severity;
                    }
                    diagnostic.is_unnecessary = is_unnecessary;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }
1866
    /// Applies a batch of diagnostics for `abs_path`: updates the matching open
    /// buffer (if any), stores the diagnostics on the worktree, and emits a
    /// `DiagnosticsUpdated` event. Diagnostics for invisible worktrees are
    /// silently discarded.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // NOTE(review): this matches buffers by relative path only, without
        // checking the worktree — two worktrees containing the same relative
        // path could match the wrong buffer. Confirm whether that's intended.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1907
    /// Sanitizes a batch of diagnostics against the buffer snapshot that
    /// corresponds to the LSP-reported `version`, then installs them on the
    /// buffer: sorts entries deterministically, remaps disk-based ranges across
    /// unsaved edits, clips ranges to the snapshot, and widens empty ranges so
    /// they remain visible.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break diagnostics that share a range: primaries first, then
        // disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Order by start ascending, end descending (outer ranges before inner
        // ones), then by the tie-break above.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                // If we couldn't extend forward (e.g. at end of line), try
                // extending backward instead.
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1976
    /// Reloads the given buffers from disk, skipping buffers that aren't dirty.
    ///
    /// Local buffers are reloaded directly; remote buffers are reloaded via an
    /// RPC round-trip to the host. Returns a `ProjectTransaction` mapping each
    /// reloaded buffer to the transaction its reload produced; when
    /// `push_to_history` is false, those transactions are forgotten from each
    /// buffer's undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            // Clean buffers already match disk; nothing to reload.
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote buffers only matter when the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2040
    /// Formats the given buffers via their language servers.
    ///
    /// Remote buffers are formatted through an RPC request to the host; local
    /// buffers use `textDocument/formatting` when the server supports it,
    /// falling back to `textDocument/rangeFormatting` over the whole document,
    /// and are skipped when the server supports neither. Resulting edits are
    /// applied as a single transaction per buffer and collected into the
    /// returned `ProjectTransaction`.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // NOTE(review): a single file-less buffer aborts the whole
                // batch with an empty transaction, discarding buffers already
                // collected above — confirm that this is intentional.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // `OneOf::Left(false)` means explicitly unsupported; anything
                // else (true or full options) counts as supported.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    // Emulate whole-document formatting with a range request
                    // spanning the entire buffer.
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into one undoable
                        // transaction, separate from prior edits.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2155
2156 pub fn definition<T: ToPointUtf16>(
2157 &self,
2158 buffer: &ModelHandle<Buffer>,
2159 position: T,
2160 cx: &mut ModelContext<Self>,
2161 ) -> Task<Result<Vec<Location>>> {
2162 let position = position.to_point_utf16(buffer.read(cx));
2163 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2164 }
2165
2166 pub fn references<T: ToPointUtf16>(
2167 &self,
2168 buffer: &ModelHandle<Buffer>,
2169 position: T,
2170 cx: &mut ModelContext<Self>,
2171 ) -> Task<Result<Vec<Location>>> {
2172 let position = position.to_point_utf16(buffer.read(cx));
2173 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2174 }
2175
2176 pub fn document_highlights<T: ToPointUtf16>(
2177 &self,
2178 buffer: &ModelHandle<Buffer>,
2179 position: T,
2180 cx: &mut ModelContext<Self>,
2181 ) -> Task<Result<Vec<DocumentHighlight>>> {
2182 let position = position.to_point_utf16(buffer.read(cx));
2183
2184 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2185 }
2186
    /// Fetches workspace symbols matching `query`.
    ///
    /// Locally, fans a `workspace/symbol` request out to every distinct
    /// language server and merges the responses. When the project is remote,
    /// forwards a `GetProjectSymbols` RPC to the host instead. Returns an
    /// empty list when the project is neither local nor remote.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // One server instance can be registered under several
            // (worktree, language) keys; dedupe by pointer identity so each
            // server is queried exactly once.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            // Issue all requests up front so they run concurrently.
            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // `language_servers` and `responses` are in the same
                        // order, so zipping pairs each response with the
                        // metadata of the server that produced it.
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer attributing the symbol to a worktree of
                                    // this project; fall back to a path expressed
                                    // relative to the originating server's root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    // NOTE(review): presumably the signature lets a
                                    // remote peer validate this (worktree, path) pair
                                    // later — confirm against `symbol_signature`.
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        // Symbols that fail to deserialize are logged and skipped
                        // rather than failing the whole request.
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            // Neither local nor connected to a host: nothing to search.
            Task::ready(Ok(Default::default()))
        }
    }
2290
    /// Opens the buffer containing the given symbol.
    ///
    /// Locally, resolves the symbol's absolute path against its worktree and
    /// opens it through the language server that produced the symbol.
    /// Remotely, forwards an `OpenBufferForSymbol` RPC to the host.
    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            // The server is keyed by the worktree the symbol originated from,
            // which may differ from the worktree the symbol's path lives in.
            let language_server = if let Some(server) = self.language_servers.get(&(
                symbol.source_worktree_id,
                Arc::from(symbol.language_name.as_str()),
            )) {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(
                symbol_uri,
                Arc::from(symbol.language_name.as_str()),
                language_server,
                cx,
            )
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2345
    /// Requests code completions at `position` in the given buffer.
    ///
    /// Locally this sends a `textDocument/completion` request to the buffer's
    /// language server; remotely it forwards a `GetCompletions` RPC to the
    /// host. Returns an empty list when the buffer has no file, no server is
    /// available, or the project is neither local nor remote.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffers without a backing file can't be completed.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // `anchor_after` so the completion position tracks text the user
        // keeps typing at the cursor.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Safe: local files always have an absolute path.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP response is either a flat array or a (possibly
                // incomplete) list; both reduce to a Vec of items here.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    // `this` is the source buffer here.
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // If clipping changes the range, the server's edit no
                            // longer matches the buffer (it changed since the
                            // request) — drop the completion rather than apply a
                            // misplaced edit.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has caught up to the version the
                // host computed the completions against, so the deserialized
                // anchors resolve correctly.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2470
    /// Applies a completion's additional text edits (e.g. auto-imports) after
    /// the completion itself was inserted.
    ///
    /// Locally, resolves the completion item via `completionItem/resolve` and
    /// applies any `additional_text_edits` in a single transaction. Remotely,
    /// asks the host to do so and replays the resulting transaction. When
    /// `push_to_history` is false the transaction is returned but removed
    /// from the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // Servers may omit additional edits until the item is resolved.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into one transaction,
                        // separate from whatever came before.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                // Report the transaction but keep it out of undo.
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to replicate here before
                    // touching the local history.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2551
2552 pub fn code_actions<T: ToOffset>(
2553 &self,
2554 buffer_handle: &ModelHandle<Buffer>,
2555 range: Range<T>,
2556 cx: &mut ModelContext<Self>,
2557 ) -> Task<Result<Vec<CodeAction>>> {
2558 let buffer_handle = buffer_handle.clone();
2559 let buffer = buffer_handle.read(cx);
2560 let buffer_id = buffer.remote_id();
2561 let worktree;
2562 let buffer_abs_path;
2563 if let Some(file) = File::from_dyn(buffer.file()) {
2564 worktree = file.worktree.clone();
2565 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2566 } else {
2567 return Task::ready(Ok(Default::default()));
2568 };
2569 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2570
2571 if worktree.read(cx).as_local().is_some() {
2572 let buffer_abs_path = buffer_abs_path.unwrap();
2573 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2574 server.clone()
2575 } else {
2576 return Task::ready(Ok(Default::default()));
2577 };
2578
2579 let lsp_range = lsp::Range::new(
2580 range.start.to_point_utf16(buffer).to_lsp_position(),
2581 range.end.to_point_utf16(buffer).to_lsp_position(),
2582 );
2583 cx.foreground().spawn(async move {
2584 if !lang_server.capabilities().code_action_provider.is_some() {
2585 return Ok(Default::default());
2586 }
2587
2588 Ok(lang_server
2589 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2590 text_document: lsp::TextDocumentIdentifier::new(
2591 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2592 ),
2593 range: lsp_range,
2594 work_done_progress_params: Default::default(),
2595 partial_result_params: Default::default(),
2596 context: lsp::CodeActionContext {
2597 diagnostics: Default::default(),
2598 only: Some(vec![
2599 lsp::CodeActionKind::QUICKFIX,
2600 lsp::CodeActionKind::REFACTOR,
2601 lsp::CodeActionKind::REFACTOR_EXTRACT,
2602 ]),
2603 },
2604 })
2605 .await?
2606 .unwrap_or_default()
2607 .into_iter()
2608 .filter_map(|entry| {
2609 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2610 Some(CodeAction {
2611 range: range.clone(),
2612 lsp_action,
2613 })
2614 } else {
2615 None
2616 }
2617 })
2618 .collect())
2619 })
2620 } else if let Some(project_id) = self.remote_id() {
2621 let rpc = self.client.clone();
2622 let version = buffer.version();
2623 cx.spawn_weak(|_, mut cx| async move {
2624 let response = rpc
2625 .request(proto::GetCodeActions {
2626 project_id,
2627 buffer_id,
2628 start: Some(language::proto::serialize_anchor(&range.start)),
2629 end: Some(language::proto::serialize_anchor(&range.end)),
2630 version: serialize_version(&version),
2631 })
2632 .await?;
2633
2634 buffer_handle
2635 .update(&mut cx, |buffer, _| {
2636 buffer.wait_for_version(deserialize_version(response.version))
2637 })
2638 .await;
2639
2640 response
2641 .actions
2642 .into_iter()
2643 .map(language::proto::deserialize_code_action)
2644 .collect()
2645 })
2646 } else {
2647 Task::ready(Ok(Default::default()))
2648 }
2649 }
2650
    /// Applies a previously-fetched code action, returning the transactions
    /// for every buffer it modified.
    ///
    /// Locally, the (possibly lazily-resolved) action is resolved with the
    /// language server and its workspace edit is applied. Remotely, the host
    /// applies it and the resulting project transaction is replayed here.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh the range
                    // embedded in its `data` payload (the buffer may have
                    // changed since the action was fetched), then resolve it.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve support: re-fetch the actions for the range
                    // and pick the matching one by title, erroring if it's
                    // no longer offered.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                // Despite the name, `response` here is the serialized project
                // transaction extracted from the RPC response.
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2737
2738 async fn deserialize_workspace_edit(
2739 this: ModelHandle<Self>,
2740 edit: lsp::WorkspaceEdit,
2741 push_to_history: bool,
2742 language_name: Arc<str>,
2743 language_server: Arc<LanguageServer>,
2744 cx: &mut AsyncAppContext,
2745 ) -> Result<ProjectTransaction> {
2746 let fs = this.read_with(cx, |this, _| this.fs.clone());
2747 let mut operations = Vec::new();
2748 if let Some(document_changes) = edit.document_changes {
2749 match document_changes {
2750 lsp::DocumentChanges::Edits(edits) => {
2751 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2752 }
2753 lsp::DocumentChanges::Operations(ops) => operations = ops,
2754 }
2755 } else if let Some(changes) = edit.changes {
2756 operations.extend(changes.into_iter().map(|(uri, edits)| {
2757 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2758 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2759 uri,
2760 version: None,
2761 },
2762 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2763 })
2764 }));
2765 }
2766
2767 let mut project_transaction = ProjectTransaction::default();
2768 for operation in operations {
2769 match operation {
2770 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2771 let abs_path = op
2772 .uri
2773 .to_file_path()
2774 .map_err(|_| anyhow!("can't convert URI to path"))?;
2775
2776 if let Some(parent_path) = abs_path.parent() {
2777 fs.create_dir(parent_path).await?;
2778 }
2779 if abs_path.ends_with("/") {
2780 fs.create_dir(&abs_path).await?;
2781 } else {
2782 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2783 .await?;
2784 }
2785 }
2786 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2787 let source_abs_path = op
2788 .old_uri
2789 .to_file_path()
2790 .map_err(|_| anyhow!("can't convert URI to path"))?;
2791 let target_abs_path = op
2792 .new_uri
2793 .to_file_path()
2794 .map_err(|_| anyhow!("can't convert URI to path"))?;
2795 fs.rename(
2796 &source_abs_path,
2797 &target_abs_path,
2798 op.options.map(Into::into).unwrap_or_default(),
2799 )
2800 .await?;
2801 }
2802 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2803 let abs_path = op
2804 .uri
2805 .to_file_path()
2806 .map_err(|_| anyhow!("can't convert URI to path"))?;
2807 let options = op.options.map(Into::into).unwrap_or_default();
2808 if abs_path.ends_with("/") {
2809 fs.remove_dir(&abs_path, options).await?;
2810 } else {
2811 fs.remove_file(&abs_path, options).await?;
2812 }
2813 }
2814 lsp::DocumentChangeOperation::Edit(op) => {
2815 let buffer_to_edit = this
2816 .update(cx, |this, cx| {
2817 this.open_local_buffer_via_lsp(
2818 op.text_document.uri,
2819 language_name.clone(),
2820 language_server.clone(),
2821 cx,
2822 )
2823 })
2824 .await?;
2825
2826 let edits = this
2827 .update(cx, |this, cx| {
2828 let edits = op.edits.into_iter().map(|edit| match edit {
2829 lsp::OneOf::Left(edit) => edit,
2830 lsp::OneOf::Right(edit) => edit.text_edit,
2831 });
2832 this.edits_from_lsp(
2833 &buffer_to_edit,
2834 edits,
2835 op.text_document.version,
2836 cx,
2837 )
2838 })
2839 .await?;
2840
2841 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2842 buffer.finalize_last_transaction();
2843 buffer.start_transaction();
2844 for (range, text) in edits {
2845 buffer.edit([range], text, cx);
2846 }
2847 let transaction = if buffer.end_transaction(cx).is_some() {
2848 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2849 if !push_to_history {
2850 buffer.forget_transaction(transaction.id);
2851 }
2852 Some(transaction)
2853 } else {
2854 None
2855 };
2856
2857 transaction
2858 });
2859 if let Some(transaction) = transaction {
2860 project_transaction.0.insert(buffer_to_edit, transaction);
2861 }
2862 }
2863 }
2864 }
2865
2866 Ok(project_transaction)
2867 }
2868
2869 pub fn prepare_rename<T: ToPointUtf16>(
2870 &self,
2871 buffer: ModelHandle<Buffer>,
2872 position: T,
2873 cx: &mut ModelContext<Self>,
2874 ) -> Task<Result<Option<Range<Anchor>>>> {
2875 let position = position.to_point_utf16(buffer.read(cx));
2876 self.request_lsp(buffer, PrepareRename { position }, cx)
2877 }
2878
2879 pub fn perform_rename<T: ToPointUtf16>(
2880 &self,
2881 buffer: ModelHandle<Buffer>,
2882 position: T,
2883 new_name: String,
2884 push_to_history: bool,
2885 cx: &mut ModelContext<Self>,
2886 ) -> Task<Result<ProjectTransaction>> {
2887 let position = position.to_point_utf16(buffer.read(cx));
2888 self.request_lsp(
2889 buffer,
2890 PerformRename {
2891 position,
2892 new_name,
2893 push_to_history,
2894 },
2895 cx,
2896 )
2897 }
2898
    /// Performs a project-wide text search, returning the matching ranges
    /// grouped by buffer.
    ///
    /// Locally this runs a three-stage pipeline: (1) background workers scan
    /// files on disk to find candidate paths, (2) candidate paths are opened
    /// as buffers, and (3) another pool of workers searches buffer snapshots
    /// for the precise match ranges. Remotely, the query is forwarded to the
    /// host and the resulting locations are deserialized.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan files on disk, splitting the global file index
            // range evenly across `workers` tasks. Each worker walks only the
            // slice of each snapshot that falls inside its index range.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // This worker's range overlaps this
                                                // snapshot; compute the overlap.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver gone: stop scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per worker
                                                    // instead of allocating per file.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: turn candidate paths into buffer snapshots. Buffers
            // that are already open are searched directly (they may contain
            // unsaved edits not present on disk).
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Only forward buffers we haven't already sent
                            // from the open-buffer pass above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search the snapshots in parallel, each worker
            // accumulating into its own map to avoid shared-state locking;
            // the maps are merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3101
    /// Dispatches a typed LSP command for a buffer, either directly to the
    /// local language server or over RPC to the host.
    ///
    /// Falls through to a default (empty) response when the buffer has no
    /// local file/server and the project isn't remote, or when the server
    /// lacks the required capability.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                // Build the LSP params synchronously while we still hold the
                // borrow of `cx`; the request itself runs asynchronously.
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Capability check happens inside the task, after the
                    // server handle is captured.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3144
3145 pub fn find_or_create_local_worktree(
3146 &mut self,
3147 abs_path: impl AsRef<Path>,
3148 visible: bool,
3149 cx: &mut ModelContext<Self>,
3150 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3151 let abs_path = abs_path.as_ref();
3152 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3153 Task::ready(Ok((tree.clone(), relative_path.into())))
3154 } else {
3155 let worktree = self.create_local_worktree(abs_path, visible, cx);
3156 cx.foreground()
3157 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3158 }
3159 }
3160
3161 pub fn find_local_worktree(
3162 &self,
3163 abs_path: &Path,
3164 cx: &AppContext,
3165 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3166 for tree in self.worktrees(cx) {
3167 if let Some(relative_path) = tree
3168 .read(cx)
3169 .as_local()
3170 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3171 {
3172 return Some((tree.clone(), relative_path.into()));
3173 }
3174 }
3175 None
3176 }
3177
3178 pub fn is_shared(&self) -> bool {
3179 match &self.client_state {
3180 ProjectClientState::Local { is_shared, .. } => *is_shared,
3181 ProjectClientState::Remote { .. } => false,
3182 }
3183 }
3184
    /// Creates (and registers) a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path are deduplicated through
    /// `loading_local_worktrees`, which holds a `Shared` future per in-flight
    /// path. If the project is remote-connected, the new worktree is also
    /// shared or registered with the server as appropriate.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the in-flight entry before inspecting the
                        // result so a failure doesn't wedge future attempts.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        // Tell the server about the new worktree: full share
                        // when collaborating, registration otherwise.
                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    // Wrap the error in Arc so the future's output is Clone,
                    // as required by `.shared()`.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Convert the shared Arc-wrapped error back into anyhow.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3251
3252 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3253 self.worktrees.retain(|worktree| {
3254 worktree
3255 .upgrade(cx)
3256 .map_or(false, |w| w.read(cx).id() != id)
3257 });
3258 cx.notify();
3259 }
3260
    /// Wires a newly created worktree into the project.
    ///
    /// Observes the worktree so its changes re-notify project observers, and
    /// for local worktrees subscribes to its events to keep open buffers in
    /// sync with on-disk changes. The worktree is held strongly when the
    /// project is shared or the worktree is visible or remote; otherwise only
    /// a weak handle is kept.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // A strong handle keeps the worktree alive with the project; a weak
        // handle lets an invisible, unshared worktree drop as soon as its
        // last external handle goes away.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            // Prune the dangling weak entry once the worktree is released.
            cx.observe_release(&worktree, |this, _, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        cx.notify();
    }
3289
3290 fn update_local_worktree_buffers(
3291 &mut self,
3292 worktree_handle: ModelHandle<Worktree>,
3293 cx: &mut ModelContext<Self>,
3294 ) {
3295 let snapshot = worktree_handle.read(cx).snapshot();
3296 let mut buffers_to_delete = Vec::new();
3297 for (buffer_id, buffer) in &self.opened_buffers {
3298 if let Some(buffer) = buffer.upgrade(cx) {
3299 buffer.update(cx, |buffer, cx| {
3300 if let Some(old_file) = File::from_dyn(buffer.file()) {
3301 if old_file.worktree != worktree_handle {
3302 return;
3303 }
3304
3305 let new_file = if let Some(entry) = old_file
3306 .entry_id
3307 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3308 {
3309 File {
3310 is_local: true,
3311 entry_id: Some(entry.id),
3312 mtime: entry.mtime,
3313 path: entry.path.clone(),
3314 worktree: worktree_handle.clone(),
3315 }
3316 } else if let Some(entry) =
3317 snapshot.entry_for_path(old_file.path().as_ref())
3318 {
3319 File {
3320 is_local: true,
3321 entry_id: Some(entry.id),
3322 mtime: entry.mtime,
3323 path: entry.path.clone(),
3324 worktree: worktree_handle.clone(),
3325 }
3326 } else {
3327 File {
3328 is_local: true,
3329 entry_id: None,
3330 path: old_file.path().clone(),
3331 mtime: old_file.mtime(),
3332 worktree: worktree_handle.clone(),
3333 }
3334 };
3335
3336 if let Some(project_id) = self.remote_id() {
3337 self.client
3338 .send(proto::UpdateBufferFile {
3339 project_id,
3340 buffer_id: *buffer_id as u64,
3341 file: Some(new_file.to_proto()),
3342 })
3343 .log_err();
3344 }
3345 buffer.file_updated(Box::new(new_file), cx).detach();
3346 }
3347 });
3348 } else {
3349 buffers_to_delete.push(*buffer_id);
3350 }
3351 }
3352
3353 for buffer_id in buffers_to_delete {
3354 self.opened_buffers.remove(&buffer_id);
3355 }
3356 }
3357
3358 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3359 let new_active_entry = entry.and_then(|project_path| {
3360 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3361 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3362 Some(entry.id)
3363 });
3364 if new_active_entry != self.active_entry {
3365 self.active_entry = new_active_entry;
3366 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3367 }
3368 }
3369
3370 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3371 self.language_servers_with_diagnostics_running > 0
3372 }
3373
3374 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3375 let mut summary = DiagnosticSummary::default();
3376 for (_, path_summary) in self.diagnostic_summaries(cx) {
3377 summary.error_count += path_summary.error_count;
3378 summary.warning_count += path_summary.warning_count;
3379 summary.info_count += path_summary.info_count;
3380 summary.hint_count += path_summary.hint_count;
3381 }
3382 summary
3383 }
3384
    /// Iterates over every path's diagnostic summary across all worktrees,
    /// tagging each with its project-relative path.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
3397
3398 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3399 self.language_servers_with_diagnostics_running += 1;
3400 if self.language_servers_with_diagnostics_running == 1 {
3401 cx.emit(Event::DiskBasedDiagnosticsStarted);
3402 }
3403 }
3404
    /// Records that a language server finished a disk-based diagnostics pass.
    /// `DiskBasedDiagnosticsUpdated` is emitted on every call, while
    /// `DiskBasedDiagnosticsFinished` is emitted only when the last running
    /// server completes.
    ///
    /// NOTE(review): the counter underflows (panicking in debug builds) if
    /// this is called without a matching `disk_based_diagnostics_started`;
    /// callers are presumably expected to keep the pair balanced — confirm.
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
3412
    /// The currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3416
3417 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3418 self.worktree_for_id(path.worktree_id, cx)?
3419 .read(cx)
3420 .entry_for_path(&path.path)
3421 .map(|entry| entry.id)
3422 }
3423
3424 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3425 let worktree = self.worktree_for_entry(entry_id, cx)?;
3426 let worktree = worktree.read(cx);
3427 let worktree_id = worktree.id();
3428 let path = worktree.entry_for_id(entry_id)?.path.clone();
3429 Some(ProjectPath { worktree_id, path })
3430 }
3431
3432 // RPC message handlers
3433
    /// Handles the host's `UnshareProject` message by transitioning this
    /// project out of its shared state.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.project_unshared(cx));
        Ok(())
    }
3443
    /// Handles an `AddProjectCollaborator` message by resolving the
    /// collaborator's user via the user store and recording them in
    /// `collaborators`.
    async fn handle_add_collaborator(
        this: ModelHandle<Self>,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        // Resolving the user is async, so it happens outside any model update.
        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
        this.update(&mut cx, |this, cx| {
            this.collaborators
                .insert(collaborator.peer_id, collaborator);
            cx.notify();
        });

        Ok(())
    }
3466
    /// Handles a `RemoveProjectCollaborator` message: drops the collaborator
    /// record, evicts their replica from every open buffer, and emits
    /// `CollaboratorLeft`.
    async fn handle_remove_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = PeerId(envelope.payload.peer_id);
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            // Remove the departed peer's replica state from all live buffers.
            for (_, buffer) in &this.opened_buffers {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }
            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
        })
    }
3490
    /// Handles the host's `RegisterWorktree` message by creating a remote
    /// worktree stub (entries arrive later via `UpdateWorktree`) and adding
    /// it to the project.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Entries and summaries start empty; they stream in afterwards.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
3514
3515 async fn handle_unregister_worktree(
3516 this: ModelHandle<Self>,
3517 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3518 _: Arc<Client>,
3519 mut cx: AsyncAppContext,
3520 ) -> Result<()> {
3521 this.update(&mut cx, |this, cx| {
3522 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3523 this.remove_worktree(worktree_id, cx);
3524 Ok(())
3525 })
3526 }
3527
    /// Handles an `UpdateWorktree` message by forwarding the entry updates to
    /// the corresponding remote worktree. Updates for unknown worktrees are
    /// silently ignored (the worktree may have been removed concurrently).
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
3545
    /// Handles an `UpdateDiagnosticSummary` message by recording the new
    /// per-path summary on the remote worktree and emitting
    /// `DiagnosticsUpdated` for the affected path.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
3572
    /// Handles a `StartLanguageServer` message by recording an initially idle
    /// status entry for the server so its progress can be displayed.
    async fn handle_start_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::StartLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let server = envelope
            .payload
            .server
            .ok_or_else(|| anyhow!("invalid server"))?;
        this.update(&mut cx, |this, cx| {
            this.language_server_statuses.insert(
                server.id as usize,
                LanguageServerStatus {
                    name: server.name,
                    pending_work: Default::default(),
                    pending_diagnostic_updates: 0,
                },
            );
            cx.notify();
        });
        Ok(())
    }
3596
    /// Handles an `UpdateLanguageServer` message, mirroring the host's
    /// language-server activity on this guest: work-progress bookkeeping and
    /// disk-based diagnostics start/finish transitions.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3645
3646 async fn handle_update_buffer(
3647 this: ModelHandle<Self>,
3648 envelope: TypedEnvelope<proto::UpdateBuffer>,
3649 _: Arc<Client>,
3650 mut cx: AsyncAppContext,
3651 ) -> Result<()> {
3652 this.update(&mut cx, |this, cx| {
3653 let payload = envelope.payload.clone();
3654 let buffer_id = payload.buffer_id;
3655 let ops = payload
3656 .operations
3657 .into_iter()
3658 .map(|op| language::proto::deserialize_operation(op))
3659 .collect::<Result<Vec<_>, _>>()?;
3660 match this.opened_buffers.entry(buffer_id) {
3661 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3662 OpenBuffer::Strong(buffer) => {
3663 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3664 }
3665 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3666 OpenBuffer::Weak(_) => {}
3667 },
3668 hash_map::Entry::Vacant(e) => {
3669 e.insert(OpenBuffer::Loading(ops));
3670 }
3671 }
3672 Ok(())
3673 })
3674 }
3675
3676 async fn handle_update_buffer_file(
3677 this: ModelHandle<Self>,
3678 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3679 _: Arc<Client>,
3680 mut cx: AsyncAppContext,
3681 ) -> Result<()> {
3682 this.update(&mut cx, |this, cx| {
3683 let payload = envelope.payload.clone();
3684 let buffer_id = payload.buffer_id;
3685 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3686 let worktree = this
3687 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3688 .ok_or_else(|| anyhow!("no such worktree"))?;
3689 let file = File::from_proto(file, worktree.clone(), cx)?;
3690 let buffer = this
3691 .opened_buffers
3692 .get_mut(&buffer_id)
3693 .and_then(|b| b.upgrade(cx))
3694 .ok_or_else(|| anyhow!("no such buffer"))?;
3695 buffer.update(cx, |buffer, cx| {
3696 buffer.file_updated(Box::new(file), cx).detach();
3697 });
3698 Ok(())
3699 })
3700 }
3701
    /// Handles a guest's `SaveBuffer` request: waits until this host's copy
    /// of the buffer has caught up to the guest's version, saves it, and
    /// replies with the saved version and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        // NOTE(review): `upgrade(cx).unwrap()` panics if the buffer handle
        // is dead but not yet pruned — presumably host-side buffers in a
        // shared project are held strongly; confirm.
        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until every edit the guest has seen is applied locally.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
3734
    /// Handles a guest's `ReloadBuffers` request: reloads the named buffers
    /// from disk and replies with the resulting transaction, serialized for
    /// the requesting peer.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .map(|buffer| buffer.upgrade(cx).unwrap())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest records its own undo.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
3763
3764 async fn handle_format_buffers(
3765 this: ModelHandle<Self>,
3766 envelope: TypedEnvelope<proto::FormatBuffers>,
3767 _: Arc<Client>,
3768 mut cx: AsyncAppContext,
3769 ) -> Result<proto::FormatBuffersResponse> {
3770 let sender_id = envelope.original_sender_id()?;
3771 let format = this.update(&mut cx, |this, cx| {
3772 let mut buffers = HashSet::default();
3773 for buffer_id in &envelope.payload.buffer_ids {
3774 buffers.insert(
3775 this.opened_buffers
3776 .get(buffer_id)
3777 .map(|buffer| buffer.upgrade(cx).unwrap())
3778 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3779 );
3780 }
3781 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3782 })?;
3783
3784 let project_transaction = format.await?;
3785 let project_transaction = this.update(&mut cx, |this, cx| {
3786 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3787 });
3788 Ok(proto::FormatBuffersResponse {
3789 transaction: Some(project_transaction),
3790 })
3791 }
3792
    /// Handles a guest's `GetCompletions` request: waits for the buffer to
    /// reach the guest's version, queries completions at the given anchor,
    /// and replies with the serialized results plus the buffer version they
    /// were computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Report the version the completions were actually computed against,
        // which may be newer than the one the guest requested.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
3827
    /// Handles a guest's request to apply a completion's additional
    /// (server-provided) edits, replying with the resulting transaction, if
    /// any.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language is needed to rebuild the completion label.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
3860
    /// Handles a guest's `GetCodeActions` request: waits for the buffer to
    /// reach the guest's version, computes actions for the anchored range,
    /// and replies with the serialized actions plus the buffer version they
    /// were computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Report the version the actions were actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
3903
    /// Handles a guest's `ApplyCodeAction` request: applies the action on the
    /// host and replies with the resulting transaction, serialized for the
    /// requesting peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
3934
    /// Generic handler for LSP-backed requests forwarded by guests.
    ///
    /// Deserializes the request via `T::from_proto`, runs it against the
    /// local language server through `request_lsp`, and serializes the
    /// response for the original sender, tagged with the buffer version the
    /// request ran against.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before running, so the response reflects the
        // snapshot the request was served against.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3975
3976 async fn handle_get_project_symbols(
3977 this: ModelHandle<Self>,
3978 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3979 _: Arc<Client>,
3980 mut cx: AsyncAppContext,
3981 ) -> Result<proto::GetProjectSymbolsResponse> {
3982 let symbols = this
3983 .update(&mut cx, |this, cx| {
3984 this.symbols(&envelope.payload.query, cx)
3985 })
3986 .await?;
3987
3988 Ok(proto::GetProjectSymbolsResponse {
3989 symbols: symbols.iter().map(serialize_symbol).collect(),
3990 })
3991 }
3992
    /// Handles a guest's `SearchProject` request: runs the search locally and
    /// replies with every match as a location whose buffer is serialized for
    /// the requesting peer.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Serialized per range; after the first, this sends only
                    // the buffer id rather than the full state.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
4022
    /// Handles a guest's request to open the buffer containing a project
    /// symbol. The symbol's signature is re-derived and verified first, so a
    /// guest cannot open arbitrary paths by forging symbols.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
4053
4054 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4055 let mut hasher = Sha256::new();
4056 hasher.update(worktree_id.to_proto().to_be_bytes());
4057 hasher.update(path.to_string_lossy().as_bytes());
4058 hasher.update(self.nonce.to_be_bytes());
4059 hasher.finalize().as_slice().try_into().unwrap()
4060 }
4061
4062 async fn handle_open_buffer_by_id(
4063 this: ModelHandle<Self>,
4064 envelope: TypedEnvelope<proto::OpenBufferById>,
4065 _: Arc<Client>,
4066 mut cx: AsyncAppContext,
4067 ) -> Result<proto::OpenBufferResponse> {
4068 let peer_id = envelope.original_sender_id()?;
4069 let buffer = this
4070 .update(&mut cx, |this, cx| {
4071 this.open_buffer_by_id(envelope.payload.id, cx)
4072 })
4073 .await?;
4074 this.update(&mut cx, |this, cx| {
4075 Ok(proto::OpenBufferResponse {
4076 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4077 })
4078 })
4079 }
4080
4081 async fn handle_open_buffer_by_path(
4082 this: ModelHandle<Self>,
4083 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4084 _: Arc<Client>,
4085 mut cx: AsyncAppContext,
4086 ) -> Result<proto::OpenBufferResponse> {
4087 let peer_id = envelope.original_sender_id()?;
4088 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4089 let open_buffer = this.update(&mut cx, |this, cx| {
4090 this.open_buffer(
4091 ProjectPath {
4092 worktree_id,
4093 path: PathBuf::from(envelope.payload.path).into(),
4094 },
4095 cx,
4096 )
4097 });
4098
4099 let buffer = open_buffer.await?;
4100 this.update(&mut cx, |this, cx| {
4101 Ok(proto::OpenBufferResponse {
4102 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4103 })
4104 })
4105 }
4106
4107 fn serialize_project_transaction_for_peer(
4108 &mut self,
4109 project_transaction: ProjectTransaction,
4110 peer_id: PeerId,
4111 cx: &AppContext,
4112 ) -> proto::ProjectTransaction {
4113 let mut serialized_transaction = proto::ProjectTransaction {
4114 buffers: Default::default(),
4115 transactions: Default::default(),
4116 };
4117 for (buffer, transaction) in project_transaction.0 {
4118 serialized_transaction
4119 .buffers
4120 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4121 serialized_transaction
4122 .transactions
4123 .push(language::proto::serialize_transaction(&transaction));
4124 }
4125 serialized_transaction
4126 }
4127
    /// Reconstructs a `ProjectTransaction` received from a peer.
    ///
    /// Deserializes each (buffer, transaction) pair, waits until the local
    /// buffer replicas contain every edit referenced by each transaction, and
    /// optionally pushes the transactions onto the buffers' undo histories.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // The referenced edits may not have replicated yet; wait for
            // each buffer to catch up before exposing the transaction.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
4161
4162 fn serialize_buffer_for_peer(
4163 &mut self,
4164 buffer: &ModelHandle<Buffer>,
4165 peer_id: PeerId,
4166 cx: &AppContext,
4167 ) -> proto::Buffer {
4168 let buffer_id = buffer.read(cx).remote_id();
4169 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4170 if shared_buffers.insert(buffer_id) {
4171 proto::Buffer {
4172 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4173 }
4174 } else {
4175 proto::Buffer {
4176 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4177 }
4178 }
4179 }
4180
    /// Materializes a `proto::Buffer` received from a peer.
    ///
    /// An `Id` variant refers to a buffer whose full state was sent earlier:
    /// this waits (via the `opened_buffer` watch channel) until the matching
    /// buffer appears in `opened_buffers`. A `State` variant carries the full
    /// buffer contents, from which a new local replica is created and
    /// registered.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, parking on the watch channel
                    // between attempts; the channel closing means the
                    // project itself was dropped.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the buffer's file against a live worktree
                    // before constructing the local replica.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any `Variant::Id` waiters for this buffer.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4239
    /// Deserializes a `proto::Symbol`, recomputing its display label with the
    /// named language (when available) and carrying over its signature bytes
    /// for later verification.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes a wire-provided integer straight into
        // a symbol-kind enum without validating the discriminant — an
        // out-of-range value from a peer is undefined behavior. A checked
        // conversion should replace this; confirm the enum's valid range.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4268
4269 async fn handle_buffer_saved(
4270 this: ModelHandle<Self>,
4271 envelope: TypedEnvelope<proto::BufferSaved>,
4272 _: Arc<Client>,
4273 mut cx: AsyncAppContext,
4274 ) -> Result<()> {
4275 let version = deserialize_version(envelope.payload.version);
4276 let mtime = envelope
4277 .payload
4278 .mtime
4279 .ok_or_else(|| anyhow!("missing mtime"))?
4280 .into();
4281
4282 this.update(&mut cx, |this, cx| {
4283 let buffer = this
4284 .opened_buffers
4285 .get(&envelope.payload.buffer_id)
4286 .and_then(|buffer| buffer.upgrade(cx));
4287 if let Some(buffer) = buffer {
4288 buffer.update(cx, |buffer, cx| {
4289 buffer.did_save(version, mtime, None, cx);
4290 });
4291 }
4292 Ok(())
4293 })
4294 }
4295
4296 async fn handle_buffer_reloaded(
4297 this: ModelHandle<Self>,
4298 envelope: TypedEnvelope<proto::BufferReloaded>,
4299 _: Arc<Client>,
4300 mut cx: AsyncAppContext,
4301 ) -> Result<()> {
4302 let payload = envelope.payload.clone();
4303 let version = deserialize_version(payload.version);
4304 let mtime = payload
4305 .mtime
4306 .ok_or_else(|| anyhow!("missing mtime"))?
4307 .into();
4308 this.update(&mut cx, |this, cx| {
4309 let buffer = this
4310 .opened_buffers
4311 .get(&payload.buffer_id)
4312 .and_then(|buffer| buffer.upgrade(cx));
4313 if let Some(buffer) = buffer {
4314 buffer.update(cx, |buffer, cx| {
4315 buffer.did_reload(version, mtime, cx);
4316 });
4317 }
4318 Ok(())
4319 })
4320 }
4321
4322 pub fn match_paths<'a>(
4323 &self,
4324 query: &'a str,
4325 include_ignored: bool,
4326 smart_case: bool,
4327 max_results: usize,
4328 cancel_flag: &'a AtomicBool,
4329 cx: &AppContext,
4330 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4331 let worktrees = self
4332 .worktrees(cx)
4333 .filter(|worktree| worktree.read(cx).is_visible())
4334 .collect::<Vec<_>>();
4335 let include_root_name = worktrees.len() > 1;
4336 let candidate_sets = worktrees
4337 .into_iter()
4338 .map(|worktree| CandidateSet {
4339 snapshot: worktree.read(cx).snapshot(),
4340 include_ignored,
4341 include_root_name,
4342 })
4343 .collect::<Vec<_>>();
4344
4345 let background = cx.background().clone();
4346 async move {
4347 fuzzy::match_paths(
4348 candidate_sets.as_slice(),
4349 query,
4350 smart_case,
4351 max_results,
4352 cancel_flag,
4353 background,
4354 )
4355 .await
4356 }
4357 }
4358
    /// Converts a batch of LSP `TextEdit`s into anchored buffer edits against
    /// the buffer snapshot that corresponds to `version` (or the current
    /// snapshot when `version` is `None`).
    ///
    /// Adjacent/newline-separated LSP edits are coalesced, and multiline
    /// replacements are re-diffed line-by-line so that anchors in unchanged
    /// regions keep their positions.
    ///
    /// # Errors
    /// The returned task fails if the snapshot for `version` is unavailable
    /// or if any edit's coordinates fall outside the snapshot.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Bail out of coalescing when the gap is more than a
                        // single newline: the next edit starts further than one
                        // row below, starts mid-line, or this edit doesn't end
                        // at the end of its line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates don't exist in the snapshot
                // (clipping would move them), rather than applying garbage.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the cursor has advanced past unchanged
                    // text since the last recorded edit; consecutive changes
                    // are merged into the previous edit instead.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion at a single point.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement: no diffing needed.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4461
    /// Resolves the text snapshot of `buffer` that corresponds to the given
    /// LSP document `version`, or the buffer's current snapshot when
    /// `version` is `None`.
    ///
    /// While searching, snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    /// older than the requested one are pruned from the per-buffer history.
    ///
    /// # Errors
    /// Fails if no snapshot history exists for the buffer, or if the
    /// requested version is not among the retained snapshots.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
            let mut found_snapshot = None;
            snapshots.retain(|(snapshot_version, snapshot)| {
                // Drop snapshots that have aged out of the retention window;
                // capture the one matching the requested version on the way.
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false
                } else {
                    if *snapshot_version == version {
                        found_snapshot = Some(snapshot.clone());
                    }
                    true
                }
            });

            found_snapshot.ok_or_else(|| {
                anyhow!(
                    "snapshot not found for buffer {} at version {}",
                    buffer_id,
                    version
                )
            })
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
4499
4500 fn language_server_for_buffer(
4501 &self,
4502 buffer: &Buffer,
4503 cx: &AppContext,
4504 ) -> Option<&Arc<LanguageServer>> {
4505 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4506 let worktree_id = file.worktree_id(cx);
4507 self.language_servers.get(&(worktree_id, language.name()))
4508 } else {
4509 None
4510 }
4511 }
4512}
4513
4514impl WorktreeHandle {
4515 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4516 match self {
4517 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4518 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4519 }
4520 }
4521}
4522
4523impl OpenBuffer {
4524 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4525 match self {
4526 OpenBuffer::Strong(handle) => Some(handle.clone()),
4527 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4528 OpenBuffer::Loading(_) => None,
4529 }
4530 }
4531}
4532
/// One worktree's contribution to project-wide fuzzy path matching: a
/// point-in-time snapshot plus the options controlling which paths are
/// offered and how they are prefixed.
struct CandidateSet {
    // Immutable snapshot of the worktree's entries.
    snapshot: Snapshot,
    // Whether ignored files are offered as candidates.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
4538
4539impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4540 type Candidates = CandidateSetIter<'a>;
4541
4542 fn id(&self) -> usize {
4543 self.snapshot.id().to_usize()
4544 }
4545
4546 fn len(&self) -> usize {
4547 if self.include_ignored {
4548 self.snapshot.file_count()
4549 } else {
4550 self.snapshot.visible_file_count()
4551 }
4552 }
4553
4554 fn prefix(&self) -> Arc<str> {
4555 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4556 self.snapshot.root_name().into()
4557 } else if self.include_root_name {
4558 format!("{}/", self.snapshot.root_name()).into()
4559 } else {
4560 "".into()
4561 }
4562 }
4563
4564 fn candidates(&'a self, start: usize) -> Self::Candidates {
4565 CandidateSetIter {
4566 traversal: self.snapshot.files(self.include_ignored, start),
4567 }
4568 }
4569}
4570
/// Iterator adapter that walks a worktree file [`Traversal`] and yields each
/// file entry as a fuzzy-match candidate.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4574
4575impl<'a> Iterator for CandidateSetIter<'a> {
4576 type Item = PathMatchCandidate<'a>;
4577
4578 fn next(&mut self) -> Option<Self::Item> {
4579 self.traversal.next().map(|entry| {
4580 if let EntryKind::File(char_bag) = entry.kind {
4581 PathMatchCandidate {
4582 path: &entry.path,
4583 char_bag,
4584 }
4585 } else {
4586 unreachable!()
4587 }
4588 })
4589 }
4590}
4591
impl Entity for Project {
    type Event = Event;

    /// Notifies the server when the project is dropped: a shared local
    /// project is unregistered, and a remote project is left.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared, i.e. it
                // has been assigned a remote id.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    /// Initiates shutdown of all language servers and returns a future that
    /// resolves once every shutdown has completed, delaying app exit until
    /// then.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4631
4632impl Collaborator {
4633 fn from_proto(
4634 message: proto::Collaborator,
4635 user_store: &ModelHandle<UserStore>,
4636 cx: &mut AsyncAppContext,
4637 ) -> impl Future<Output = Result<Self>> {
4638 let user = user_store.update(cx, |user_store, cx| {
4639 user_store.fetch_user(message.user_id, cx)
4640 });
4641
4642 async move {
4643 Ok(Self {
4644 peer_id: PeerId(message.peer_id),
4645 user: user.await?,
4646 replica_id: message.replica_id as ReplicaId,
4647 })
4648 }
4649 }
4650}
4651
impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
    /// Builds a `ProjectPath` from a `(worktree id, relative path)` pair,
    /// accepting anything path-like for the second element.
    fn from((worktree_id, path): (WorktreeId, P)) -> Self {
        Self {
            worktree_id,
            path: path.as_ref().into(),
        }
    }
}
4660
4661impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4662 fn from(options: lsp::CreateFileOptions) -> Self {
4663 Self {
4664 overwrite: options.overwrite.unwrap_or(false),
4665 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4666 }
4667 }
4668}
4669
4670impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4671 fn from(options: lsp::RenameFileOptions) -> Self {
4672 Self {
4673 overwrite: options.overwrite.unwrap_or(false),
4674 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4675 }
4676 }
4677}
4678
4679impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4680 fn from(options: lsp::DeleteFileOptions) -> Self {
4681 Self {
4682 recursive: options.recursive.unwrap_or(false),
4683 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4684 }
4685 }
4686}
4687
/// Converts a [`Symbol`] into its protobuf wire representation (the inverse
/// of `Project::deserialize_symbol`).
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // SAFETY: NOTE(review): reinterprets the symbol kind as the raw
        // integer stored on the wire; assumes the two types share a
        // representation — TODO: replace with an explicit conversion.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4707
/// Computes the path to `path` relative to `base`, inserting `..` components
/// wherever `path` does not lie beneath `base`.
///
/// Components are compared lexically; the paths are not canonicalized or
/// resolved against the filesystem.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target_components = path.components();
    let mut base_components = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target_components.next(), base_components.next()) {
            // Both paths exhausted: nothing left to emit.
            (None, None) => break,
            // Base exhausted: the remainder of `path` is the relative path.
            (Some(component), None) => {
                result.push(component);
                result.extend(target_components.by_ref());
                break;
            }
            // Path exhausted: climb out of each remaining base component.
            (None, Some(_)) => result.push(Component::ParentDir),
            // Still inside the common prefix: skip matching components.
            (Some(a), Some(b)) if result.is_empty() && a == b => {}
            // A `.` in base matches anything without consuming a level.
            (Some(component), Some(Component::CurDir)) => result.push(component),
            // First mismatch: back out of the rest of `base`, then descend
            // into the rest of `path`.
            (Some(component), Some(_)) => {
                result.push(Component::ParentDir);
                for _ in base_components.by_ref() {
                    result.push(Component::ParentDir);
                }
                result.push(component);
                result.extend(target_components.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
4736
impl Item for Buffer {
    /// Returns the project entry backing this buffer's file, if the buffer's
    /// file is a project `File` with a known entry.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
4742
4743#[cfg(test)]
4744mod tests {
4745 use super::{Event, *};
4746 use fs::RealFs;
4747 use futures::{future, StreamExt};
4748 use gpui::test::subscribe;
4749 use language::{
4750 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4751 ToPoint,
4752 };
4753 use lsp::Url;
4754 use serde_json::json;
4755 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4756 use unindent::Unindent as _;
4757 use util::{assert_set_eq, test::temp_tree};
4758 use worktree::WorktreeHandle as _;
4759
    // Verifies that scanning a worktree reached through a symlinked root
    // resolves file inodes through symlinks, and that fuzzy path matching
    // over the project returns the expected matches in order.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Symlink the worktree root, and symlink one subdirectory to another
        // so two paths share inodes.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy query "bna" should match only paths under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4822
    // Exercises the full language-server lifecycle: servers start lazily per
    // (worktree, language), open/change/save/close notifications are routed
    // to the right server, and restarting servers reopens the relevant
    // documents in the replacement servers.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
        let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
        rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });
        json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(rust_lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));
        let json_language = Arc::new(Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                language_server: Some(json_lsp_config),
                ..Default::default()
            },
            None,
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(rust_language);
            project.languages.add(json_language);
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        // Both old servers should receive shutdown requests before new ones start.
        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(()));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure both rust documents are reopened in new rust language server without worrying about order
        assert_set_eq!(
            [
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_rust_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                    version: 1,
                    text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
            ]
        );

        // Ensure json document is reopened in new json language server
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
5100
    // Verifies that nested disk-based-diagnostics progress notifications are
    // reference-counted into a single Started/Updated/Finished event sequence,
    // and that published diagnostics become visible in the buffer snapshot.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nest a second progress cycle inside the first; no extra Started
        // event should be emitted.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Ending the outstanding progress cycles emits Updated then Finished.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
5219
5220 #[gpui::test]
5221 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5222 cx.foreground().forbid_parking();
5223
5224 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5225 lsp_config
5226 .disk_based_diagnostic_sources
5227 .insert("disk".to_string());
5228 let language = Arc::new(Language::new(
5229 LanguageConfig {
5230 name: "Rust".into(),
5231 path_suffixes: vec!["rs".to_string()],
5232 language_server: Some(lsp_config),
5233 ..Default::default()
5234 },
5235 Some(tree_sitter_rust::language()),
5236 ));
5237
5238 let text = "
5239 fn a() { A }
5240 fn b() { BB }
5241 fn c() { CCC }
5242 "
5243 .unindent();
5244
5245 let fs = FakeFs::new(cx.background());
5246 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5247
5248 let project = Project::test(fs, cx);
5249 project.update(cx, |project, _| project.languages.add(language));
5250
5251 let worktree_id = project
5252 .update(cx, |project, cx| {
5253 project.find_or_create_local_worktree("/dir", true, cx)
5254 })
5255 .await
5256 .unwrap()
5257 .0
5258 .read_with(cx, |tree, _| tree.id());
5259
5260 let buffer = project
5261 .update(cx, |project, cx| {
5262 project.open_buffer((worktree_id, "a.rs"), cx)
5263 })
5264 .await
5265 .unwrap();
5266
5267 let mut fake_server = fake_servers.next().await.unwrap();
5268 let open_notification = fake_server
5269 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5270 .await;
5271
5272 // Edit the buffer, moving the content down
5273 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5274 let change_notification_1 = fake_server
5275 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5276 .await;
5277 assert!(
5278 change_notification_1.text_document.version > open_notification.text_document.version
5279 );
5280
5281 // Report some diagnostics for the initial version of the buffer
5282 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5283 lsp::PublishDiagnosticsParams {
5284 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5285 version: Some(open_notification.text_document.version),
5286 diagnostics: vec![
5287 lsp::Diagnostic {
5288 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5289 severity: Some(DiagnosticSeverity::ERROR),
5290 message: "undefined variable 'A'".to_string(),
5291 source: Some("disk".to_string()),
5292 ..Default::default()
5293 },
5294 lsp::Diagnostic {
5295 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5296 severity: Some(DiagnosticSeverity::ERROR),
5297 message: "undefined variable 'BB'".to_string(),
5298 source: Some("disk".to_string()),
5299 ..Default::default()
5300 },
5301 lsp::Diagnostic {
5302 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5303 severity: Some(DiagnosticSeverity::ERROR),
5304 source: Some("disk".to_string()),
5305 message: "undefined variable 'CCC'".to_string(),
5306 ..Default::default()
5307 },
5308 ],
5309 },
5310 );
5311
5312 // The diagnostics have moved down since they were created.
5313 buffer.next_notification(cx).await;
5314 buffer.read_with(cx, |buffer, _| {
5315 assert_eq!(
5316 buffer
5317 .snapshot()
5318 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5319 .collect::<Vec<_>>(),
5320 &[
5321 DiagnosticEntry {
5322 range: Point::new(3, 9)..Point::new(3, 11),
5323 diagnostic: Diagnostic {
5324 severity: DiagnosticSeverity::ERROR,
5325 message: "undefined variable 'BB'".to_string(),
5326 is_disk_based: true,
5327 group_id: 1,
5328 is_primary: true,
5329 ..Default::default()
5330 },
5331 },
5332 DiagnosticEntry {
5333 range: Point::new(4, 9)..Point::new(4, 12),
5334 diagnostic: Diagnostic {
5335 severity: DiagnosticSeverity::ERROR,
5336 message: "undefined variable 'CCC'".to_string(),
5337 is_disk_based: true,
5338 group_id: 2,
5339 is_primary: true,
5340 ..Default::default()
5341 }
5342 }
5343 ]
5344 );
5345 assert_eq!(
5346 chunks_with_diagnostics(buffer, 0..buffer.len()),
5347 [
5348 ("\n\nfn a() { ".to_string(), None),
5349 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5350 (" }\nfn b() { ".to_string(), None),
5351 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5352 (" }\nfn c() { ".to_string(), None),
5353 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5354 (" }\n".to_string(), None),
5355 ]
5356 );
5357 assert_eq!(
5358 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5359 [
5360 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5361 (" }\nfn c() { ".to_string(), None),
5362 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5363 ]
5364 );
5365 });
5366
5367 // Ensure overlapping diagnostics are highlighted correctly.
5368 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5369 lsp::PublishDiagnosticsParams {
5370 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5371 version: Some(open_notification.text_document.version),
5372 diagnostics: vec![
5373 lsp::Diagnostic {
5374 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5375 severity: Some(DiagnosticSeverity::ERROR),
5376 message: "undefined variable 'A'".to_string(),
5377 source: Some("disk".to_string()),
5378 ..Default::default()
5379 },
5380 lsp::Diagnostic {
5381 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5382 severity: Some(DiagnosticSeverity::WARNING),
5383 message: "unreachable statement".to_string(),
5384 source: Some("disk".to_string()),
5385 ..Default::default()
5386 },
5387 ],
5388 },
5389 );
5390
5391 buffer.next_notification(cx).await;
5392 buffer.read_with(cx, |buffer, _| {
5393 assert_eq!(
5394 buffer
5395 .snapshot()
5396 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5397 .collect::<Vec<_>>(),
5398 &[
5399 DiagnosticEntry {
5400 range: Point::new(2, 9)..Point::new(2, 12),
5401 diagnostic: Diagnostic {
5402 severity: DiagnosticSeverity::WARNING,
5403 message: "unreachable statement".to_string(),
5404 is_disk_based: true,
5405 group_id: 1,
5406 is_primary: true,
5407 ..Default::default()
5408 }
5409 },
5410 DiagnosticEntry {
5411 range: Point::new(2, 9)..Point::new(2, 10),
5412 diagnostic: Diagnostic {
5413 severity: DiagnosticSeverity::ERROR,
5414 message: "undefined variable 'A'".to_string(),
5415 is_disk_based: true,
5416 group_id: 0,
5417 is_primary: true,
5418 ..Default::default()
5419 },
5420 }
5421 ]
5422 );
5423 assert_eq!(
5424 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5425 [
5426 ("fn a() { ".to_string(), None),
5427 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5428 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5429 ("\n".to_string(), None),
5430 ]
5431 );
5432 assert_eq!(
5433 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5434 [
5435 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5436 ("\n".to_string(), None),
5437 ]
5438 );
5439 });
5440
5441 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5442 // changes since the last save.
5443 buffer.update(cx, |buffer, cx| {
5444 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5445 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5446 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5447 });
5448 let change_notification_2 = fake_server
5449 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5450 .await;
5451 assert!(
5452 change_notification_2.text_document.version
5453 > change_notification_1.text_document.version
5454 );
5455
5456 // Handle out-of-order diagnostics
5457 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5458 lsp::PublishDiagnosticsParams {
5459 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5460 version: Some(change_notification_2.text_document.version),
5461 diagnostics: vec![
5462 lsp::Diagnostic {
5463 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5464 severity: Some(DiagnosticSeverity::ERROR),
5465 message: "undefined variable 'BB'".to_string(),
5466 source: Some("disk".to_string()),
5467 ..Default::default()
5468 },
5469 lsp::Diagnostic {
5470 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5471 severity: Some(DiagnosticSeverity::WARNING),
5472 message: "undefined variable 'A'".to_string(),
5473 source: Some("disk".to_string()),
5474 ..Default::default()
5475 },
5476 ],
5477 },
5478 );
5479
5480 buffer.next_notification(cx).await;
5481 buffer.read_with(cx, |buffer, _| {
5482 assert_eq!(
5483 buffer
5484 .snapshot()
5485 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5486 .collect::<Vec<_>>(),
5487 &[
5488 DiagnosticEntry {
5489 range: Point::new(2, 21)..Point::new(2, 22),
5490 diagnostic: Diagnostic {
5491 severity: DiagnosticSeverity::WARNING,
5492 message: "undefined variable 'A'".to_string(),
5493 is_disk_based: true,
5494 group_id: 1,
5495 is_primary: true,
5496 ..Default::default()
5497 }
5498 },
5499 DiagnosticEntry {
5500 range: Point::new(3, 9)..Point::new(3, 14),
5501 diagnostic: Diagnostic {
5502 severity: DiagnosticSeverity::ERROR,
5503 message: "undefined variable 'BB'".to_string(),
5504 is_disk_based: true,
5505 group_id: 0,
5506 is_primary: true,
5507 ..Default::default()
5508 },
5509 }
5510 ]
5511 );
5512 });
5513 }
5514
5515 #[gpui::test]
5516 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5517 cx.foreground().forbid_parking();
5518
5519 let text = concat!(
5520 "let one = ;\n", //
5521 "let two = \n",
5522 "let three = 3;\n",
5523 );
5524
5525 let fs = FakeFs::new(cx.background());
5526 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5527
5528 let project = Project::test(fs, cx);
5529 let worktree_id = project
5530 .update(cx, |project, cx| {
5531 project.find_or_create_local_worktree("/dir", true, cx)
5532 })
5533 .await
5534 .unwrap()
5535 .0
5536 .read_with(cx, |tree, _| tree.id());
5537
5538 let buffer = project
5539 .update(cx, |project, cx| {
5540 project.open_buffer((worktree_id, "a.rs"), cx)
5541 })
5542 .await
5543 .unwrap();
5544
5545 project.update(cx, |project, cx| {
5546 project
5547 .update_buffer_diagnostics(
5548 &buffer,
5549 vec![
5550 DiagnosticEntry {
5551 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5552 diagnostic: Diagnostic {
5553 severity: DiagnosticSeverity::ERROR,
5554 message: "syntax error 1".to_string(),
5555 ..Default::default()
5556 },
5557 },
5558 DiagnosticEntry {
5559 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5560 diagnostic: Diagnostic {
5561 severity: DiagnosticSeverity::ERROR,
5562 message: "syntax error 2".to_string(),
5563 ..Default::default()
5564 },
5565 },
5566 ],
5567 None,
5568 cx,
5569 )
5570 .unwrap();
5571 });
5572
5573 // An empty range is extended forward to include the following character.
5574 // At the end of a line, an empty range is extended backward to include
5575 // the preceding character.
5576 buffer.read_with(cx, |buffer, _| {
5577 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5578 assert_eq!(
5579 chunks
5580 .iter()
5581 .map(|(s, d)| (s.as_str(), *d))
5582 .collect::<Vec<_>>(),
5583 &[
5584 ("let one = ", None),
5585 (";", Some(DiagnosticSeverity::ERROR)),
5586 ("\nlet two =", None),
5587 (" ", Some(DiagnosticSeverity::ERROR)),
5588 ("\nlet three = 3;\n", None)
5589 ]
5590 );
5591 });
5592 }
5593
5594 #[gpui::test]
5595 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5596 cx.foreground().forbid_parking();
5597
5598 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5599 let language = Arc::new(Language::new(
5600 LanguageConfig {
5601 name: "Rust".into(),
5602 path_suffixes: vec!["rs".to_string()],
5603 language_server: Some(lsp_config),
5604 ..Default::default()
5605 },
5606 Some(tree_sitter_rust::language()),
5607 ));
5608
5609 let text = "
5610 fn a() {
5611 f1();
5612 }
5613 fn b() {
5614 f2();
5615 }
5616 fn c() {
5617 f3();
5618 }
5619 "
5620 .unindent();
5621
5622 let fs = FakeFs::new(cx.background());
5623 fs.insert_tree(
5624 "/dir",
5625 json!({
5626 "a.rs": text.clone(),
5627 }),
5628 )
5629 .await;
5630
5631 let project = Project::test(fs, cx);
5632 project.update(cx, |project, _| project.languages.add(language));
5633
5634 let worktree_id = project
5635 .update(cx, |project, cx| {
5636 project.find_or_create_local_worktree("/dir", true, cx)
5637 })
5638 .await
5639 .unwrap()
5640 .0
5641 .read_with(cx, |tree, _| tree.id());
5642
5643 let buffer = project
5644 .update(cx, |project, cx| {
5645 project.open_buffer((worktree_id, "a.rs"), cx)
5646 })
5647 .await
5648 .unwrap();
5649
5650 let mut fake_server = fake_servers.next().await.unwrap();
5651 let lsp_document_version = fake_server
5652 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5653 .await
5654 .text_document
5655 .version;
5656
5657 // Simulate editing the buffer after the language server computes some edits.
5658 buffer.update(cx, |buffer, cx| {
5659 buffer.edit(
5660 [Point::new(0, 0)..Point::new(0, 0)],
5661 "// above first function\n",
5662 cx,
5663 );
5664 buffer.edit(
5665 [Point::new(2, 0)..Point::new(2, 0)],
5666 " // inside first function\n",
5667 cx,
5668 );
5669 buffer.edit(
5670 [Point::new(6, 4)..Point::new(6, 4)],
5671 "// inside second function ",
5672 cx,
5673 );
5674
5675 assert_eq!(
5676 buffer.text(),
5677 "
5678 // above first function
5679 fn a() {
5680 // inside first function
5681 f1();
5682 }
5683 fn b() {
5684 // inside second function f2();
5685 }
5686 fn c() {
5687 f3();
5688 }
5689 "
5690 .unindent()
5691 );
5692 });
5693
5694 let edits = project
5695 .update(cx, |project, cx| {
5696 project.edits_from_lsp(
5697 &buffer,
5698 vec![
5699 // replace body of first function
5700 lsp::TextEdit {
5701 range: lsp::Range::new(
5702 lsp::Position::new(0, 0),
5703 lsp::Position::new(3, 0),
5704 ),
5705 new_text: "
5706 fn a() {
5707 f10();
5708 }
5709 "
5710 .unindent(),
5711 },
5712 // edit inside second function
5713 lsp::TextEdit {
5714 range: lsp::Range::new(
5715 lsp::Position::new(4, 6),
5716 lsp::Position::new(4, 6),
5717 ),
5718 new_text: "00".into(),
5719 },
5720 // edit inside third function via two distinct edits
5721 lsp::TextEdit {
5722 range: lsp::Range::new(
5723 lsp::Position::new(7, 5),
5724 lsp::Position::new(7, 5),
5725 ),
5726 new_text: "4000".into(),
5727 },
5728 lsp::TextEdit {
5729 range: lsp::Range::new(
5730 lsp::Position::new(7, 5),
5731 lsp::Position::new(7, 6),
5732 ),
5733 new_text: "".into(),
5734 },
5735 ],
5736 Some(lsp_document_version),
5737 cx,
5738 )
5739 })
5740 .await
5741 .unwrap();
5742
5743 buffer.update(cx, |buffer, cx| {
5744 for (range, new_text) in edits {
5745 buffer.edit([range], new_text, cx);
5746 }
5747 assert_eq!(
5748 buffer.text(),
5749 "
5750 // above first function
5751 fn a() {
5752 // inside first function
5753 f10();
5754 }
5755 fn b() {
5756 // inside second function f200();
5757 }
5758 fn c() {
5759 f4000();
5760 }
5761 "
5762 .unindent()
5763 );
5764 });
5765 }
5766
5767 #[gpui::test]
5768 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5769 cx.foreground().forbid_parking();
5770
5771 let text = "
5772 use a::b;
5773 use a::c;
5774
5775 fn f() {
5776 b();
5777 c();
5778 }
5779 "
5780 .unindent();
5781
5782 let fs = FakeFs::new(cx.background());
5783 fs.insert_tree(
5784 "/dir",
5785 json!({
5786 "a.rs": text.clone(),
5787 }),
5788 )
5789 .await;
5790
5791 let project = Project::test(fs, cx);
5792 let worktree_id = project
5793 .update(cx, |project, cx| {
5794 project.find_or_create_local_worktree("/dir", true, cx)
5795 })
5796 .await
5797 .unwrap()
5798 .0
5799 .read_with(cx, |tree, _| tree.id());
5800
5801 let buffer = project
5802 .update(cx, |project, cx| {
5803 project.open_buffer((worktree_id, "a.rs"), cx)
5804 })
5805 .await
5806 .unwrap();
5807
5808 // Simulate the language server sending us a small edit in the form of a very large diff.
5809 // Rust-analyzer does this when performing a merge-imports code action.
5810 let edits = project
5811 .update(cx, |project, cx| {
5812 project.edits_from_lsp(
5813 &buffer,
5814 [
5815 // Replace the first use statement without editing the semicolon.
5816 lsp::TextEdit {
5817 range: lsp::Range::new(
5818 lsp::Position::new(0, 4),
5819 lsp::Position::new(0, 8),
5820 ),
5821 new_text: "a::{b, c}".into(),
5822 },
5823 // Reinsert the remainder of the file between the semicolon and the final
5824 // newline of the file.
5825 lsp::TextEdit {
5826 range: lsp::Range::new(
5827 lsp::Position::new(0, 9),
5828 lsp::Position::new(0, 9),
5829 ),
5830 new_text: "\n\n".into(),
5831 },
5832 lsp::TextEdit {
5833 range: lsp::Range::new(
5834 lsp::Position::new(0, 9),
5835 lsp::Position::new(0, 9),
5836 ),
5837 new_text: "
5838 fn f() {
5839 b();
5840 c();
5841 }"
5842 .unindent(),
5843 },
5844 // Delete everything after the first newline of the file.
5845 lsp::TextEdit {
5846 range: lsp::Range::new(
5847 lsp::Position::new(1, 0),
5848 lsp::Position::new(7, 0),
5849 ),
5850 new_text: "".into(),
5851 },
5852 ],
5853 None,
5854 cx,
5855 )
5856 })
5857 .await
5858 .unwrap();
5859
5860 buffer.update(cx, |buffer, cx| {
5861 let edits = edits
5862 .into_iter()
5863 .map(|(range, text)| {
5864 (
5865 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5866 text,
5867 )
5868 })
5869 .collect::<Vec<_>>();
5870
5871 assert_eq!(
5872 edits,
5873 [
5874 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5875 (Point::new(1, 0)..Point::new(2, 0), "".into())
5876 ]
5877 );
5878
5879 for (range, new_text) in edits {
5880 buffer.edit([range], new_text, cx);
5881 }
5882 assert_eq!(
5883 buffer.text(),
5884 "
5885 use a::{b, c};
5886
5887 fn f() {
5888 b();
5889 c();
5890 }
5891 "
5892 .unindent()
5893 );
5894 });
5895 }
5896
5897 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5898 buffer: &Buffer,
5899 range: Range<T>,
5900 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5901 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5902 for chunk in buffer.snapshot().chunks(range, true) {
5903 if chunks.last().map_or(false, |prev_chunk| {
5904 prev_chunk.1 == chunk.diagnostic_severity
5905 }) {
5906 chunks.last_mut().unwrap().0.push_str(chunk.text);
5907 } else {
5908 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5909 }
5910 }
5911 chunks
5912 }
5913
5914 #[gpui::test]
5915 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5916 let dir = temp_tree(json!({
5917 "root": {
5918 "dir1": {},
5919 "dir2": {
5920 "dir3": {}
5921 }
5922 }
5923 }));
5924
5925 let project = Project::test(Arc::new(RealFs), cx);
5926 let (tree, _) = project
5927 .update(cx, |project, cx| {
5928 project.find_or_create_local_worktree(&dir.path(), true, cx)
5929 })
5930 .await
5931 .unwrap();
5932
5933 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5934 .await;
5935
5936 let cancel_flag = Default::default();
5937 let results = project
5938 .read_with(cx, |project, cx| {
5939 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5940 })
5941 .await;
5942
5943 assert!(results.is_empty());
5944 }
5945
5946 #[gpui::test]
5947 async fn test_definition(cx: &mut gpui::TestAppContext) {
5948 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5949 let language = Arc::new(Language::new(
5950 LanguageConfig {
5951 name: "Rust".into(),
5952 path_suffixes: vec!["rs".to_string()],
5953 language_server: Some(language_server_config),
5954 ..Default::default()
5955 },
5956 Some(tree_sitter_rust::language()),
5957 ));
5958
5959 let fs = FakeFs::new(cx.background());
5960 fs.insert_tree(
5961 "/dir",
5962 json!({
5963 "a.rs": "const fn a() { A }",
5964 "b.rs": "const y: i32 = crate::a()",
5965 }),
5966 )
5967 .await;
5968
5969 let project = Project::test(fs, cx);
5970 project.update(cx, |project, _| {
5971 Arc::get_mut(&mut project.languages).unwrap().add(language);
5972 });
5973
5974 let (tree, _) = project
5975 .update(cx, |project, cx| {
5976 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5977 })
5978 .await
5979 .unwrap();
5980 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5981 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5982 .await;
5983
5984 let buffer = project
5985 .update(cx, |project, cx| {
5986 project.open_buffer(
5987 ProjectPath {
5988 worktree_id,
5989 path: Path::new("").into(),
5990 },
5991 cx,
5992 )
5993 })
5994 .await
5995 .unwrap();
5996
5997 let mut fake_server = fake_servers.next().await.unwrap();
5998 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
5999 let params = params.text_document_position_params;
6000 assert_eq!(
6001 params.text_document.uri.to_file_path().unwrap(),
6002 Path::new("/dir/b.rs"),
6003 );
6004 assert_eq!(params.position, lsp::Position::new(0, 22));
6005
6006 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
6007 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6008 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6009 )))
6010 });
6011
6012 let mut definitions = project
6013 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6014 .await
6015 .unwrap();
6016
6017 assert_eq!(definitions.len(), 1);
6018 let definition = definitions.pop().unwrap();
6019 cx.update(|cx| {
6020 let target_buffer = definition.buffer.read(cx);
6021 assert_eq!(
6022 target_buffer
6023 .file()
6024 .unwrap()
6025 .as_local()
6026 .unwrap()
6027 .abs_path(cx),
6028 Path::new("/dir/a.rs"),
6029 );
6030 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6031 assert_eq!(
6032 list_worktrees(&project, cx),
6033 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6034 );
6035
6036 drop(definition);
6037 });
6038 cx.read(|cx| {
6039 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6040 });
6041
6042 fn list_worktrees<'a>(
6043 project: &'a ModelHandle<Project>,
6044 cx: &'a AppContext,
6045 ) -> Vec<(&'a Path, bool)> {
6046 project
6047 .read(cx)
6048 .worktrees(cx)
6049 .map(|worktree| {
6050 let worktree = worktree.read(cx);
6051 (
6052 worktree.as_local().unwrap().abs_path().as_ref(),
6053 worktree.is_visible(),
6054 )
6055 })
6056 .collect::<Vec<_>>()
6057 }
6058 }
6059
6060 #[gpui::test]
6061 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6062 let fs = FakeFs::new(cx.background());
6063 fs.insert_tree(
6064 "/dir",
6065 json!({
6066 "file1": "the old contents",
6067 }),
6068 )
6069 .await;
6070
6071 let project = Project::test(fs.clone(), cx);
6072 let worktree_id = project
6073 .update(cx, |p, cx| {
6074 p.find_or_create_local_worktree("/dir", true, cx)
6075 })
6076 .await
6077 .unwrap()
6078 .0
6079 .read_with(cx, |tree, _| tree.id());
6080
6081 let buffer = project
6082 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6083 .await
6084 .unwrap();
6085 buffer
6086 .update(cx, |buffer, cx| {
6087 assert_eq!(buffer.text(), "the old contents");
6088 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6089 buffer.save(cx)
6090 })
6091 .await
6092 .unwrap();
6093
6094 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6095 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6096 }
6097
6098 #[gpui::test]
6099 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6100 let fs = FakeFs::new(cx.background());
6101 fs.insert_tree(
6102 "/dir",
6103 json!({
6104 "file1": "the old contents",
6105 }),
6106 )
6107 .await;
6108
6109 let project = Project::test(fs.clone(), cx);
6110 let worktree_id = project
6111 .update(cx, |p, cx| {
6112 p.find_or_create_local_worktree("/dir/file1", true, cx)
6113 })
6114 .await
6115 .unwrap()
6116 .0
6117 .read_with(cx, |tree, _| tree.id());
6118
6119 let buffer = project
6120 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6121 .await
6122 .unwrap();
6123 buffer
6124 .update(cx, |buffer, cx| {
6125 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6126 buffer.save(cx)
6127 })
6128 .await
6129 .unwrap();
6130
6131 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6132 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6133 }
6134
6135 #[gpui::test]
6136 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6137 let fs = FakeFs::new(cx.background());
6138 fs.insert_tree("/dir", json!({})).await;
6139
6140 let project = Project::test(fs.clone(), cx);
6141 let (worktree, _) = project
6142 .update(cx, |project, cx| {
6143 project.find_or_create_local_worktree("/dir", true, cx)
6144 })
6145 .await
6146 .unwrap();
6147 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6148
6149 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6150 buffer.update(cx, |buffer, cx| {
6151 buffer.edit([0..0], "abc", cx);
6152 assert!(buffer.is_dirty());
6153 assert!(!buffer.has_conflict());
6154 });
6155 project
6156 .update(cx, |project, cx| {
6157 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6158 })
6159 .await
6160 .unwrap();
6161 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6162 buffer.read_with(cx, |buffer, cx| {
6163 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6164 assert!(!buffer.is_dirty());
6165 assert!(!buffer.has_conflict());
6166 });
6167
6168 let opened_buffer = project
6169 .update(cx, |project, cx| {
6170 project.open_buffer((worktree_id, "file1"), cx)
6171 })
6172 .await
6173 .unwrap();
6174 assert_eq!(opened_buffer, buffer);
6175 }
6176
6177 #[gpui::test(retries = 5)]
6178 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6179 let dir = temp_tree(json!({
6180 "a": {
6181 "file1": "",
6182 "file2": "",
6183 "file3": "",
6184 },
6185 "b": {
6186 "c": {
6187 "file4": "",
6188 "file5": "",
6189 }
6190 }
6191 }));
6192
6193 let project = Project::test(Arc::new(RealFs), cx);
6194 let rpc = project.read_with(cx, |p, _| p.client.clone());
6195
6196 let (tree, _) = project
6197 .update(cx, |p, cx| {
6198 p.find_or_create_local_worktree(dir.path(), true, cx)
6199 })
6200 .await
6201 .unwrap();
6202 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6203
6204 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6205 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6206 async move { buffer.await.unwrap() }
6207 };
6208 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6209 tree.read_with(cx, |tree, _| {
6210 tree.entry_for_path(path)
6211 .expect(&format!("no entry for path {}", path))
6212 .id
6213 })
6214 };
6215
6216 let buffer2 = buffer_for_path("a/file2", cx).await;
6217 let buffer3 = buffer_for_path("a/file3", cx).await;
6218 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6219 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6220
6221 let file2_id = id_for_path("a/file2", &cx);
6222 let file3_id = id_for_path("a/file3", &cx);
6223 let file4_id = id_for_path("b/c/file4", &cx);
6224
6225 // Wait for the initial scan.
6226 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6227 .await;
6228
6229 // Create a remote copy of this worktree.
6230 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6231 let (remote, load_task) = cx.update(|cx| {
6232 Worktree::remote(
6233 1,
6234 1,
6235 initial_snapshot.to_proto(&Default::default(), true),
6236 rpc.clone(),
6237 cx,
6238 )
6239 });
6240 load_task.await;
6241
6242 cx.read(|cx| {
6243 assert!(!buffer2.read(cx).is_dirty());
6244 assert!(!buffer3.read(cx).is_dirty());
6245 assert!(!buffer4.read(cx).is_dirty());
6246 assert!(!buffer5.read(cx).is_dirty());
6247 });
6248
6249 // Rename and delete files and directories.
6250 tree.flush_fs_events(&cx).await;
6251 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6252 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6253 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6254 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6255 tree.flush_fs_events(&cx).await;
6256
6257 let expected_paths = vec![
6258 "a",
6259 "a/file1",
6260 "a/file2.new",
6261 "b",
6262 "d",
6263 "d/file3",
6264 "d/file4",
6265 ];
6266
6267 cx.read(|app| {
6268 assert_eq!(
6269 tree.read(app)
6270 .paths()
6271 .map(|p| p.to_str().unwrap())
6272 .collect::<Vec<_>>(),
6273 expected_paths
6274 );
6275
6276 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6277 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6278 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6279
6280 assert_eq!(
6281 buffer2.read(app).file().unwrap().path().as_ref(),
6282 Path::new("a/file2.new")
6283 );
6284 assert_eq!(
6285 buffer3.read(app).file().unwrap().path().as_ref(),
6286 Path::new("d/file3")
6287 );
6288 assert_eq!(
6289 buffer4.read(app).file().unwrap().path().as_ref(),
6290 Path::new("d/file4")
6291 );
6292 assert_eq!(
6293 buffer5.read(app).file().unwrap().path().as_ref(),
6294 Path::new("b/c/file5")
6295 );
6296
6297 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6298 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6299 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6300 assert!(buffer5.read(app).file().unwrap().is_deleted());
6301 });
6302
6303 // Update the remote worktree. Check that it becomes consistent with the
6304 // local worktree.
6305 remote.update(cx, |remote, cx| {
6306 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6307 &initial_snapshot,
6308 1,
6309 1,
6310 true,
6311 );
6312 remote
6313 .as_remote_mut()
6314 .unwrap()
6315 .snapshot
6316 .apply_remote_update(update_message)
6317 .unwrap();
6318
6319 assert_eq!(
6320 remote
6321 .paths()
6322 .map(|p| p.to_str().unwrap())
6323 .collect::<Vec<_>>(),
6324 expected_paths
6325 );
6326 });
6327 }
6328
6329 #[gpui::test]
6330 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6331 let fs = FakeFs::new(cx.background());
6332 fs.insert_tree(
6333 "/the-dir",
6334 json!({
6335 "a.txt": "a-contents",
6336 "b.txt": "b-contents",
6337 }),
6338 )
6339 .await;
6340
6341 let project = Project::test(fs.clone(), cx);
6342 let worktree_id = project
6343 .update(cx, |p, cx| {
6344 p.find_or_create_local_worktree("/the-dir", true, cx)
6345 })
6346 .await
6347 .unwrap()
6348 .0
6349 .read_with(cx, |tree, _| tree.id());
6350
6351 // Spawn multiple tasks to open paths, repeating some paths.
6352 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6353 (
6354 p.open_buffer((worktree_id, "a.txt"), cx),
6355 p.open_buffer((worktree_id, "b.txt"), cx),
6356 p.open_buffer((worktree_id, "a.txt"), cx),
6357 )
6358 });
6359
6360 let buffer_a_1 = buffer_a_1.await.unwrap();
6361 let buffer_a_2 = buffer_a_2.await.unwrap();
6362 let buffer_b = buffer_b.await.unwrap();
6363 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6364 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6365
6366 // There is only one buffer per path.
6367 let buffer_a_id = buffer_a_1.id();
6368 assert_eq!(buffer_a_2.id(), buffer_a_id);
6369
6370 // Open the same path again while it is still open.
6371 drop(buffer_a_1);
6372 let buffer_a_3 = project
6373 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6374 .await
6375 .unwrap();
6376
6377 // There's still only one buffer per path.
6378 assert_eq!(buffer_a_3.id(), buffer_a_id);
6379 }
6380
6381 #[gpui::test]
6382 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6383 use std::fs;
6384
6385 let dir = temp_tree(json!({
6386 "file1": "abc",
6387 "file2": "def",
6388 "file3": "ghi",
6389 }));
6390
6391 let project = Project::test(Arc::new(RealFs), cx);
6392 let (worktree, _) = project
6393 .update(cx, |p, cx| {
6394 p.find_or_create_local_worktree(dir.path(), true, cx)
6395 })
6396 .await
6397 .unwrap();
6398 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6399
6400 worktree.flush_fs_events(&cx).await;
6401 worktree
6402 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6403 .await;
6404
6405 let buffer1 = project
6406 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6407 .await
6408 .unwrap();
6409 let events = Rc::new(RefCell::new(Vec::new()));
6410
6411 // initially, the buffer isn't dirty.
6412 buffer1.update(cx, |buffer, cx| {
6413 cx.subscribe(&buffer1, {
6414 let events = events.clone();
6415 move |_, _, event, _| match event {
6416 BufferEvent::Operation(_) => {}
6417 _ => events.borrow_mut().push(event.clone()),
6418 }
6419 })
6420 .detach();
6421
6422 assert!(!buffer.is_dirty());
6423 assert!(events.borrow().is_empty());
6424
6425 buffer.edit(vec![1..2], "", cx);
6426 });
6427
6428 // after the first edit, the buffer is dirty, and emits a dirtied event.
6429 buffer1.update(cx, |buffer, cx| {
6430 assert!(buffer.text() == "ac");
6431 assert!(buffer.is_dirty());
6432 assert_eq!(
6433 *events.borrow(),
6434 &[language::Event::Edited, language::Event::Dirtied]
6435 );
6436 events.borrow_mut().clear();
6437 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6438 });
6439
6440 // after saving, the buffer is not dirty, and emits a saved event.
6441 buffer1.update(cx, |buffer, cx| {
6442 assert!(!buffer.is_dirty());
6443 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6444 events.borrow_mut().clear();
6445
6446 buffer.edit(vec![1..1], "B", cx);
6447 buffer.edit(vec![2..2], "D", cx);
6448 });
6449
6450 // after editing again, the buffer is dirty, and emits another dirty event.
6451 buffer1.update(cx, |buffer, cx| {
6452 assert!(buffer.text() == "aBDc");
6453 assert!(buffer.is_dirty());
6454 assert_eq!(
6455 *events.borrow(),
6456 &[
6457 language::Event::Edited,
6458 language::Event::Dirtied,
6459 language::Event::Edited,
6460 ],
6461 );
6462 events.borrow_mut().clear();
6463
6464 // TODO - currently, after restoring the buffer to its
6465 // previously-saved state, the is still considered dirty.
6466 buffer.edit([1..3], "", cx);
6467 assert!(buffer.text() == "ac");
6468 assert!(buffer.is_dirty());
6469 });
6470
6471 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6472
6473 // When a file is deleted, the buffer is considered dirty.
6474 let events = Rc::new(RefCell::new(Vec::new()));
6475 let buffer2 = project
6476 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6477 .await
6478 .unwrap();
6479 buffer2.update(cx, |_, cx| {
6480 cx.subscribe(&buffer2, {
6481 let events = events.clone();
6482 move |_, _, event, _| events.borrow_mut().push(event.clone())
6483 })
6484 .detach();
6485 });
6486
6487 fs::remove_file(dir.path().join("file2")).unwrap();
6488 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6489 assert_eq!(
6490 *events.borrow(),
6491 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6492 );
6493
6494 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6495 let events = Rc::new(RefCell::new(Vec::new()));
6496 let buffer3 = project
6497 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6498 .await
6499 .unwrap();
6500 buffer3.update(cx, |_, cx| {
6501 cx.subscribe(&buffer3, {
6502 let events = events.clone();
6503 move |_, _, event, _| events.borrow_mut().push(event.clone())
6504 })
6505 .detach();
6506 });
6507
6508 worktree.flush_fs_events(&cx).await;
6509 buffer3.update(cx, |buffer, cx| {
6510 buffer.edit(Some(0..0), "x", cx);
6511 });
6512 events.borrow_mut().clear();
6513 fs::remove_file(dir.path().join("file3")).unwrap();
6514 buffer3
6515 .condition(&cx, |_, _| !events.borrow().is_empty())
6516 .await;
6517 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6518 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6519 }
6520
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // Covers how a buffer reacts when its backing file changes on disk:
        // a clean buffer is reloaded in place via a diff of the old vs. new
        // contents, while a dirty buffer keeps its edits and is only marked
        // as conflicted.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        // Wait for the initial worktree scan to finish before opening the
        // buffer.
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer so it is dirty before the next on-disk change.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
6618
6619 #[gpui::test]
6620 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6621 cx.foreground().forbid_parking();
6622
6623 let fs = FakeFs::new(cx.background());
6624 fs.insert_tree(
6625 "/the-dir",
6626 json!({
6627 "a.rs": "
6628 fn foo(mut v: Vec<usize>) {
6629 for x in &v {
6630 v.push(1);
6631 }
6632 }
6633 "
6634 .unindent(),
6635 }),
6636 )
6637 .await;
6638
6639 let project = Project::test(fs.clone(), cx);
6640 let (worktree, _) = project
6641 .update(cx, |p, cx| {
6642 p.find_or_create_local_worktree("/the-dir", true, cx)
6643 })
6644 .await
6645 .unwrap();
6646 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6647
6648 let buffer = project
6649 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6650 .await
6651 .unwrap();
6652
6653 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6654 let message = lsp::PublishDiagnosticsParams {
6655 uri: buffer_uri.clone(),
6656 diagnostics: vec![
6657 lsp::Diagnostic {
6658 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6659 severity: Some(DiagnosticSeverity::WARNING),
6660 message: "error 1".to_string(),
6661 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6662 location: lsp::Location {
6663 uri: buffer_uri.clone(),
6664 range: lsp::Range::new(
6665 lsp::Position::new(1, 8),
6666 lsp::Position::new(1, 9),
6667 ),
6668 },
6669 message: "error 1 hint 1".to_string(),
6670 }]),
6671 ..Default::default()
6672 },
6673 lsp::Diagnostic {
6674 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6675 severity: Some(DiagnosticSeverity::HINT),
6676 message: "error 1 hint 1".to_string(),
6677 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6678 location: lsp::Location {
6679 uri: buffer_uri.clone(),
6680 range: lsp::Range::new(
6681 lsp::Position::new(1, 8),
6682 lsp::Position::new(1, 9),
6683 ),
6684 },
6685 message: "original diagnostic".to_string(),
6686 }]),
6687 ..Default::default()
6688 },
6689 lsp::Diagnostic {
6690 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6691 severity: Some(DiagnosticSeverity::ERROR),
6692 message: "error 2".to_string(),
6693 related_information: Some(vec![
6694 lsp::DiagnosticRelatedInformation {
6695 location: lsp::Location {
6696 uri: buffer_uri.clone(),
6697 range: lsp::Range::new(
6698 lsp::Position::new(1, 13),
6699 lsp::Position::new(1, 15),
6700 ),
6701 },
6702 message: "error 2 hint 1".to_string(),
6703 },
6704 lsp::DiagnosticRelatedInformation {
6705 location: lsp::Location {
6706 uri: buffer_uri.clone(),
6707 range: lsp::Range::new(
6708 lsp::Position::new(1, 13),
6709 lsp::Position::new(1, 15),
6710 ),
6711 },
6712 message: "error 2 hint 2".to_string(),
6713 },
6714 ]),
6715 ..Default::default()
6716 },
6717 lsp::Diagnostic {
6718 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6719 severity: Some(DiagnosticSeverity::HINT),
6720 message: "error 2 hint 1".to_string(),
6721 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6722 location: lsp::Location {
6723 uri: buffer_uri.clone(),
6724 range: lsp::Range::new(
6725 lsp::Position::new(2, 8),
6726 lsp::Position::new(2, 17),
6727 ),
6728 },
6729 message: "original diagnostic".to_string(),
6730 }]),
6731 ..Default::default()
6732 },
6733 lsp::Diagnostic {
6734 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6735 severity: Some(DiagnosticSeverity::HINT),
6736 message: "error 2 hint 2".to_string(),
6737 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6738 location: lsp::Location {
6739 uri: buffer_uri.clone(),
6740 range: lsp::Range::new(
6741 lsp::Position::new(2, 8),
6742 lsp::Position::new(2, 17),
6743 ),
6744 },
6745 message: "original diagnostic".to_string(),
6746 }]),
6747 ..Default::default()
6748 },
6749 ],
6750 version: None,
6751 };
6752
6753 project
6754 .update(cx, |p, cx| {
6755 p.update_diagnostics(message, &Default::default(), cx)
6756 })
6757 .unwrap();
6758 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6759
6760 assert_eq!(
6761 buffer
6762 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6763 .collect::<Vec<_>>(),
6764 &[
6765 DiagnosticEntry {
6766 range: Point::new(1, 8)..Point::new(1, 9),
6767 diagnostic: Diagnostic {
6768 severity: DiagnosticSeverity::WARNING,
6769 message: "error 1".to_string(),
6770 group_id: 0,
6771 is_primary: true,
6772 ..Default::default()
6773 }
6774 },
6775 DiagnosticEntry {
6776 range: Point::new(1, 8)..Point::new(1, 9),
6777 diagnostic: Diagnostic {
6778 severity: DiagnosticSeverity::HINT,
6779 message: "error 1 hint 1".to_string(),
6780 group_id: 0,
6781 is_primary: false,
6782 ..Default::default()
6783 }
6784 },
6785 DiagnosticEntry {
6786 range: Point::new(1, 13)..Point::new(1, 15),
6787 diagnostic: Diagnostic {
6788 severity: DiagnosticSeverity::HINT,
6789 message: "error 2 hint 1".to_string(),
6790 group_id: 1,
6791 is_primary: false,
6792 ..Default::default()
6793 }
6794 },
6795 DiagnosticEntry {
6796 range: Point::new(1, 13)..Point::new(1, 15),
6797 diagnostic: Diagnostic {
6798 severity: DiagnosticSeverity::HINT,
6799 message: "error 2 hint 2".to_string(),
6800 group_id: 1,
6801 is_primary: false,
6802 ..Default::default()
6803 }
6804 },
6805 DiagnosticEntry {
6806 range: Point::new(2, 8)..Point::new(2, 17),
6807 diagnostic: Diagnostic {
6808 severity: DiagnosticSeverity::ERROR,
6809 message: "error 2".to_string(),
6810 group_id: 1,
6811 is_primary: true,
6812 ..Default::default()
6813 }
6814 }
6815 ]
6816 );
6817
6818 assert_eq!(
6819 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6820 &[
6821 DiagnosticEntry {
6822 range: Point::new(1, 8)..Point::new(1, 9),
6823 diagnostic: Diagnostic {
6824 severity: DiagnosticSeverity::WARNING,
6825 message: "error 1".to_string(),
6826 group_id: 0,
6827 is_primary: true,
6828 ..Default::default()
6829 }
6830 },
6831 DiagnosticEntry {
6832 range: Point::new(1, 8)..Point::new(1, 9),
6833 diagnostic: Diagnostic {
6834 severity: DiagnosticSeverity::HINT,
6835 message: "error 1 hint 1".to_string(),
6836 group_id: 0,
6837 is_primary: false,
6838 ..Default::default()
6839 }
6840 },
6841 ]
6842 );
6843 assert_eq!(
6844 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6845 &[
6846 DiagnosticEntry {
6847 range: Point::new(1, 13)..Point::new(1, 15),
6848 diagnostic: Diagnostic {
6849 severity: DiagnosticSeverity::HINT,
6850 message: "error 2 hint 1".to_string(),
6851 group_id: 1,
6852 is_primary: false,
6853 ..Default::default()
6854 }
6855 },
6856 DiagnosticEntry {
6857 range: Point::new(1, 13)..Point::new(1, 15),
6858 diagnostic: Diagnostic {
6859 severity: DiagnosticSeverity::HINT,
6860 message: "error 2 hint 2".to_string(),
6861 group_id: 1,
6862 is_primary: false,
6863 ..Default::default()
6864 }
6865 },
6866 DiagnosticEntry {
6867 range: Point::new(2, 8)..Point::new(2, 17),
6868 diagnostic: Diagnostic {
6869 severity: DiagnosticSeverity::ERROR,
6870 message: "error 2".to_string(),
6871 group_id: 1,
6872 is_primary: true,
6873 ..Default::default()
6874 }
6875 }
6876 ]
6877 );
6878 }
6879
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // End-to-end test of LSP-backed rename against a fake language
        // server: `prepare_rename` resolves the renameable range, then
        // `perform_rename` applies a multi-file `WorkspaceEdit`.
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a Rust buffer causes a (fake) language server to be
        // started for it, which we then pick up from `fake_servers` below.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // Issue the prepare-rename request at offset 7 (inside `ONE`), then
        // let the fake server answer with the range that may be renamed.
        // `.next().await` yields once the queued request has been served.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        // 6..9 is the span of `ONE` in "const ONE: usize = 1;".
        assert_eq!(range, 6..9);

        // Perform the rename; the fake server responds with a WorkspaceEdit
        // touching both `one.rs` (the definition) and `two.rs` (two
        // references).
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The rename yields one transaction entry per edited buffer; verify
        // both buffers' texts reflect the applied edits.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
7023
7024 #[gpui::test]
7025 async fn test_search(cx: &mut gpui::TestAppContext) {
7026 let fs = FakeFs::new(cx.background());
7027 fs.insert_tree(
7028 "/dir",
7029 json!({
7030 "one.rs": "const ONE: usize = 1;",
7031 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7032 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7033 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7034 }),
7035 )
7036 .await;
7037 let project = Project::test(fs.clone(), cx);
7038 let (tree, _) = project
7039 .update(cx, |project, cx| {
7040 project.find_or_create_local_worktree("/dir", true, cx)
7041 })
7042 .await
7043 .unwrap();
7044 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7045 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7046 .await;
7047
7048 assert_eq!(
7049 search(&project, SearchQuery::text("TWO", false, true), cx)
7050 .await
7051 .unwrap(),
7052 HashMap::from_iter([
7053 ("two.rs".to_string(), vec![6..9]),
7054 ("three.rs".to_string(), vec![37..40])
7055 ])
7056 );
7057
7058 let buffer_4 = project
7059 .update(cx, |project, cx| {
7060 project.open_buffer((worktree_id, "four.rs"), cx)
7061 })
7062 .await
7063 .unwrap();
7064 buffer_4.update(cx, |buffer, cx| {
7065 buffer.edit([20..28, 31..43], "two::TWO", cx);
7066 });
7067
7068 assert_eq!(
7069 search(&project, SearchQuery::text("TWO", false, true), cx)
7070 .await
7071 .unwrap(),
7072 HashMap::from_iter([
7073 ("two.rs".to_string(), vec![6..9]),
7074 ("three.rs".to_string(), vec![37..40]),
7075 ("four.rs".to_string(), vec![25..28, 36..39])
7076 ])
7077 );
7078
7079 async fn search(
7080 project: &ModelHandle<Project>,
7081 query: SearchQuery,
7082 cx: &mut gpui::TestAppContext,
7083 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7084 let results = project
7085 .update(cx, |project, cx| project.search(query, cx))
7086 .await?;
7087
7088 Ok(results
7089 .into_iter()
7090 .map(|(buffer, ranges)| {
7091 buffer.read_with(cx, |buffer, _| {
7092 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7093 let ranges = ranges
7094 .into_iter()
7095 .map(|range| range.to_offset(buffer))
7096 .collect::<Vec<_>>();
7097 (path, ranges)
7098 })
7099 })
7100 .collect())
7101 }
7102 }
7103}