1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use sha2::{Digest, Sha256};
32use similar::{ChangeTag, TextDiff};
33use std::{
34 cell::RefCell,
35 cmp::{self, Ordering},
36 convert::TryInto,
37 hash::Hash,
38 mem,
39 ops::Range,
40 path::{Component, Path, PathBuf},
41 rc::Rc,
42 sync::{
43 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
44 Arc,
45 },
46 time::Instant,
47};
48use util::{post_inc, ResultExt, TryFutureExt as _};
49
50pub use fs::*;
51pub use worktree::*;
52
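/// An entity that can be associated with an entry in a project's worktree,
/// such as a buffer backed by a file on disk.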
53pub trait Item: Entity {
54 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
55}
56
57pub struct Project {
58 worktrees: Vec<WorktreeHandle>,
59 active_entry: Option<ProjectEntryId>,
60 languages: Arc<LanguageRegistry>,
61 language_servers:
62 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
63 started_language_servers:
64 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
65 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
66 language_server_settings: Arc<Mutex<serde_json::Value>>,
67 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
68 next_language_server_id: usize,
69 client: Arc<client::Client>,
70 next_entry_id: Arc<AtomicUsize>,
71 user_store: ModelHandle<UserStore>,
72 fs: Arc<dyn Fs>,
73 client_state: ProjectClientState,
74 collaborators: HashMap<PeerId, Collaborator>,
75 subscriptions: Vec<client::Subscription>,
76 language_servers_with_diagnostics_running: isize,
77 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
78 shared_buffers: HashMap<PeerId, HashSet<u64>>,
79 loading_buffers: HashMap<
80 ProjectPath,
81 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
82 >,
83 loading_local_worktrees:
84 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
85 opened_buffers: HashMap<u64, OpenBuffer>,
86 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
87 nonce: u128,
88}
89
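// Buffers are held strongly while the project is shared (or remote) so that
// collaborators can still request them, and weakly otherwise so they can be
// released once no longer displayed. `Loading` queues operations that arrive
// over RPC before the corresponding buffer has finished opening.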
90enum OpenBuffer {
91 Strong(ModelHandle<Buffer>),
92 Weak(WeakModelHandle<Buffer>),
93 Loading(Vec<Operation>),
94}
95
96enum WorktreeHandle {
97 Strong(ModelHandle<Worktree>),
98 Weak(WeakModelHandle<Worktree>),
99}
100
101enum ProjectClientState {
102 Local {
103 is_shared: bool,
104 remote_id_tx: watch::Sender<Option<u64>>,
105 remote_id_rx: watch::Receiver<Option<u64>>,
106 _maintain_remote_id_task: Task<Option<()>>,
107 },
108 Remote {
109 sharing_has_stopped: bool,
110 remote_id: u64,
111 replica_id: ReplicaId,
112 _detect_unshare_task: Task<Option<()>>,
113 },
114}
115
116#[derive(Clone, Debug)]
117pub struct Collaborator {
118 pub user: Arc<User>,
119 pub peer_id: PeerId,
120 pub replica_id: ReplicaId,
121}
122
123#[derive(Clone, Debug, PartialEq)]
124pub enum Event {
125 ActiveEntryChanged(Option<ProjectEntryId>),
126 WorktreeRemoved(WorktreeId),
127 DiskBasedDiagnosticsStarted,
128 DiskBasedDiagnosticsUpdated,
129 DiskBasedDiagnosticsFinished,
130 DiagnosticsUpdated(ProjectPath),
131 RemoteIdChanged(Option<u64>),
132 CollaboratorLeft(PeerId),
133}
134
135pub struct LanguageServerStatus {
136 pub name: String,
137 pub pending_work: BTreeMap<String, LanguageServerProgress>,
138 pending_diagnostic_updates: isize,
139}
140
141#[derive(Clone, Debug)]
142pub struct LanguageServerProgress {
143 pub message: Option<String>,
144 pub percentage: Option<usize>,
145 pub last_update_at: Instant,
146}
147
148#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
149pub struct ProjectPath {
150 pub worktree_id: WorktreeId,
151 pub path: Arc<Path>,
152}
153
154#[derive(Clone, Debug, Default, PartialEq)]
155pub struct DiagnosticSummary {
156 pub error_count: usize,
157 pub warning_count: usize,
158 pub info_count: usize,
159 pub hint_count: usize,
160}
161
162#[derive(Debug)]
163pub struct Location {
164 pub buffer: ModelHandle<Buffer>,
165 pub range: Range<language::Anchor>,
166}
167
168#[derive(Debug)]
169pub struct DocumentHighlight {
170 pub range: Range<language::Anchor>,
171 pub kind: DocumentHighlightKind,
172}
173
174#[derive(Clone, Debug)]
175pub struct Symbol {
176 pub source_worktree_id: WorktreeId,
177 pub worktree_id: WorktreeId,
178 pub language_server_name: LanguageServerName,
179 pub path: PathBuf,
180 pub label: CodeLabel,
181 pub name: String,
182 pub kind: lsp::SymbolKind,
183 pub range: Range<PointUtf16>,
184 pub signature: [u8; 32],
185}
186
187#[derive(Default)]
188pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
189
190impl DiagnosticSummary {
191 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
192 let mut this = Self {
193 error_count: 0,
194 warning_count: 0,
195 info_count: 0,
196 hint_count: 0,
197 };
198
199 for entry in diagnostics {
200 if entry.diagnostic.is_primary {
201 match entry.diagnostic.severity {
202 DiagnosticSeverity::ERROR => this.error_count += 1,
203 DiagnosticSeverity::WARNING => this.warning_count += 1,
204 DiagnosticSeverity::INFORMATION => this.info_count += 1,
205 DiagnosticSeverity::HINT => this.hint_count += 1,
206 _ => {}
207 }
208 }
209 }
210
211 this
212 }
213
214 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
215 proto::DiagnosticSummary {
216 path: path.to_string_lossy().to_string(),
217 error_count: self.error_count as u32,
218 warning_count: self.warning_count as u32,
219 info_count: self.info_count as u32,
220 hint_count: self.hint_count as u32,
221 }
222 }
223}
224
225#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
226pub struct ProjectEntryId(usize);
227
228impl ProjectEntryId {
229 pub fn new(counter: &AtomicUsize) -> Self {
230 Self(counter.fetch_add(1, SeqCst))
231 }
232
233 pub fn from_proto(id: u64) -> Self {
234 Self(id as usize)
235 }
236
237 pub fn to_proto(&self) -> u64 {
238 self.0 as u64
239 }
240
241 pub fn to_usize(&self) -> usize {
242 self.0
243 }
244}
245
246impl Project {
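    /// Registers the RPC message and request handlers that let this model
    /// participate in collaboration; intended to be called once while the
    /// client is being set up.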
247 pub fn init(client: &Arc<Client>) {
248 client.add_model_message_handler(Self::handle_add_collaborator);
249 client.add_model_message_handler(Self::handle_buffer_reloaded);
250 client.add_model_message_handler(Self::handle_buffer_saved);
251 client.add_model_message_handler(Self::handle_start_language_server);
252 client.add_model_message_handler(Self::handle_update_language_server);
253 client.add_model_message_handler(Self::handle_remove_collaborator);
254 client.add_model_message_handler(Self::handle_register_worktree);
255 client.add_model_message_handler(Self::handle_unregister_worktree);
256 client.add_model_message_handler(Self::handle_unshare_project);
257 client.add_model_message_handler(Self::handle_update_buffer_file);
258 client.add_model_message_handler(Self::handle_update_buffer);
259 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
260 client.add_model_message_handler(Self::handle_update_worktree);
261 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
262 client.add_model_request_handler(Self::handle_apply_code_action);
263 client.add_model_request_handler(Self::handle_reload_buffers);
264 client.add_model_request_handler(Self::handle_format_buffers);
265 client.add_model_request_handler(Self::handle_get_code_actions);
266 client.add_model_request_handler(Self::handle_get_completions);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
272 client.add_model_request_handler(Self::handle_search_project);
273 client.add_model_request_handler(Self::handle_get_project_symbols);
274 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
275 client.add_model_request_handler(Self::handle_open_buffer_by_id);
276 client.add_model_request_handler(Self::handle_open_buffer_by_path);
277 client.add_model_request_handler(Self::handle_save_buffer);
278 }
279
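    /// Creates a project that operates directly on the local filesystem.
    /// A background task keeps the project's remote id registered with the
    /// collaboration server whenever the client is connected.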
280 pub fn local(
281 client: Arc<Client>,
282 user_store: ModelHandle<UserStore>,
283 languages: Arc<LanguageRegistry>,
284 fs: Arc<dyn Fs>,
285 cx: &mut MutableAppContext,
286 ) -> ModelHandle<Self> {
287 cx.add_model(|cx: &mut ModelContext<Self>| {
288 let (remote_id_tx, remote_id_rx) = watch::channel();
289 let _maintain_remote_id_task = cx.spawn_weak({
290 let rpc = client.clone();
291 move |this, mut cx| {
292 async move {
293 let mut status = rpc.status();
294 while let Some(status) = status.next().await {
295 if let Some(this) = this.upgrade(&cx) {
296 if status.is_connected() {
297 this.update(&mut cx, |this, cx| this.register(cx)).await?;
298 } else {
299 this.update(&mut cx, |this, cx| this.unregister(cx));
300 }
301 }
302 }
303 Ok(())
304 }
305 .log_err()
306 }
307 });
308
309 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
310 Self {
311 worktrees: Default::default(),
312 collaborators: Default::default(),
313 opened_buffers: Default::default(),
314 shared_buffers: Default::default(),
315 loading_buffers: Default::default(),
316 loading_local_worktrees: Default::default(),
317 buffer_snapshots: Default::default(),
318 client_state: ProjectClientState::Local {
319 is_shared: false,
320 remote_id_tx,
321 remote_id_rx,
322 _maintain_remote_id_task,
323 },
324 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
325 subscriptions: Vec::new(),
326 active_entry: None,
327 languages,
328 client,
329 user_store,
330 fs,
331 next_entry_id: Default::default(),
332 language_servers_with_diagnostics_running: 0,
333 language_servers: Default::default(),
334 started_language_servers: Default::default(),
335 language_server_statuses: Default::default(),
336 last_workspace_edits_by_language_server: Default::default(),
337 language_server_settings: Default::default(),
338 next_language_server_id: 0,
339 nonce: StdRng::from_entropy().gen(),
340 }
341 })
342 }
343
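    /// Joins a project hosted by another peer. This connects to the
    /// collaboration server, replicates the host's worktrees and language
    /// server statuses, and loads the current set of collaborators.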
344 pub async fn remote(
345 remote_id: u64,
346 client: Arc<Client>,
347 user_store: ModelHandle<UserStore>,
348 languages: Arc<LanguageRegistry>,
349 fs: Arc<dyn Fs>,
350 cx: &mut AsyncAppContext,
351 ) -> Result<ModelHandle<Self>> {
352 client.authenticate_and_connect(true, &cx).await?;
353
354 let response = client
355 .request(proto::JoinProject {
356 project_id: remote_id,
357 })
358 .await?;
359
360 let replica_id = response.replica_id as ReplicaId;
361
362 let mut worktrees = Vec::new();
363 for worktree in response.worktrees {
364 let (worktree, load_task) = cx
365 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
366 worktrees.push(worktree);
367 load_task.detach();
368 }
369
370 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
371 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
372 let mut this = Self {
373 worktrees: Vec::new(),
374 loading_buffers: Default::default(),
375 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
376 shared_buffers: Default::default(),
377 loading_local_worktrees: Default::default(),
378 active_entry: None,
379 collaborators: Default::default(),
380 languages,
381 user_store: user_store.clone(),
382 fs,
383 next_entry_id: Default::default(),
384 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
385 client: client.clone(),
386 client_state: ProjectClientState::Remote {
387 sharing_has_stopped: false,
388 remote_id,
389 replica_id,
390 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
391 async move {
392 let mut status = client.status();
393 let is_connected =
394 status.next().await.map_or(false, |s| s.is_connected());
395 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
396 if !is_connected || status.next().await.is_some() {
397 if let Some(this) = this.upgrade(&cx) {
398 this.update(&mut cx, |this, cx| this.project_unshared(cx))
399 }
400 }
401 Ok(())
402 }
403 .log_err()
404 }),
405 },
406 language_servers_with_diagnostics_running: 0,
407 language_servers: Default::default(),
408 started_language_servers: Default::default(),
409 language_server_settings: Default::default(),
410 language_server_statuses: response
411 .language_servers
412 .into_iter()
413 .map(|server| {
414 (
415 server.id as usize,
416 LanguageServerStatus {
417 name: server.name,
418 pending_work: Default::default(),
419 pending_diagnostic_updates: 0,
420 },
421 )
422 })
423 .collect(),
424 last_workspace_edits_by_language_server: Default::default(),
425 next_language_server_id: 0,
426 opened_buffers: Default::default(),
427 buffer_snapshots: Default::default(),
428 nonce: StdRng::from_entropy().gen(),
429 };
430 for worktree in worktrees {
431 this.add_worktree(&worktree, cx);
432 }
433 this
434 });
435
436 let user_ids = response
437 .collaborators
438 .iter()
439 .map(|peer| peer.user_id)
440 .collect();
441 user_store
442 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
443 .await?;
444 let mut collaborators = HashMap::default();
445 for message in response.collaborators {
446 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
447 collaborators.insert(collaborator.peer_id, collaborator);
448 }
449
450 this.update(cx, |this, _| {
451 this.collaborators = collaborators;
452 });
453
454 Ok(this)
455 }
456
457 #[cfg(any(test, feature = "test-support"))]
458 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
459 let languages = Arc::new(LanguageRegistry::test());
460 let http_client = client::test::FakeHttpClient::with_404_response();
461 let client = client::Client::new(http_client.clone());
462 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
463 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
464 }
465
466 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
467 self.opened_buffers
468 .get(&remote_id)
469 .and_then(|buffer| buffer.upgrade(cx))
470 }
471
472 #[cfg(any(test, feature = "test-support"))]
473 pub fn languages(&self) -> &Arc<LanguageRegistry> {
474 &self.languages
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn check_invariants(&self, cx: &AppContext) {
479 if self.is_local() {
480 let mut worktree_root_paths = HashMap::default();
481 for worktree in self.worktrees(cx) {
482 let worktree = worktree.read(cx);
483 let abs_path = worktree.as_local().unwrap().abs_path().clone();
484 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
485 assert_eq!(
486 prev_worktree_id,
487 None,
488 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
489 abs_path,
490 worktree.id(),
491 prev_worktree_id
492 )
493 }
494 } else {
495 let replica_id = self.replica_id();
496 for buffer in self.opened_buffers.values() {
497 if let Some(buffer) = buffer.upgrade(cx) {
498 let buffer = buffer.read(cx);
499 assert_eq!(
500 buffer.deferred_ops_len(),
501 0,
502 "replica {}, buffer {} has deferred operations",
503 replica_id,
504 buffer.remote_id()
505 );
506 }
507 }
508 }
509 }
510
511 #[cfg(any(test, feature = "test-support"))]
512 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
513 let path = path.into();
514 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
515 self.opened_buffers.iter().any(|(_, buffer)| {
516 if let Some(buffer) = buffer.upgrade(cx) {
517 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
518 if file.worktree == worktree && file.path() == &path.path {
519 return true;
520 }
521 }
522 }
523 false
524 })
525 } else {
526 false
527 }
528 }
529
530 pub fn fs(&self) -> &Arc<dyn Fs> {
531 &self.fs
532 }
533
534 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
535 self.unshare(cx);
536 for worktree in &self.worktrees {
537 if let Some(worktree) = worktree.upgrade(cx) {
538 worktree.update(cx, |worktree, _| {
539 worktree.as_local_mut().unwrap().unregister();
540 });
541 }
542 }
543
544 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
545 *remote_id_tx.borrow_mut() = None;
546 }
547
548 self.subscriptions.clear();
549 }
550
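    /// Obtains a project id from the server and registers every local
    /// worktree under it. Any previous registration is torn down first via
    /// `unregister`.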
551 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
552 self.unregister(cx);
553
554 let response = self.client.request(proto::RegisterProject {});
555 cx.spawn(|this, mut cx| async move {
556 let remote_id = response.await?.project_id;
557
558 let mut registrations = Vec::new();
559 this.update(&mut cx, |this, cx| {
560 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
561 *remote_id_tx.borrow_mut() = Some(remote_id);
562 }
563
564 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
565
566 this.subscriptions
567 .push(this.client.add_model_for_remote_entity(remote_id, cx));
568
569 for worktree in &this.worktrees {
570 if let Some(worktree) = worktree.upgrade(cx) {
571 registrations.push(worktree.update(cx, |worktree, cx| {
572 let worktree = worktree.as_local_mut().unwrap();
573 worktree.register(remote_id, cx)
574 }));
575 }
576 }
577 });
578
579 futures::future::try_join_all(registrations).await?;
580 Ok(())
581 })
582 }
583
584 pub fn remote_id(&self) -> Option<u64> {
585 match &self.client_state {
586 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
587 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
588 }
589 }
590
591 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
592 let mut id = None;
593 let mut watch = None;
594 match &self.client_state {
595 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
596 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
597 }
598
599 async move {
600 if let Some(id) = id {
601 return id;
602 }
603 let mut watch = watch.unwrap();
604 loop {
605 let id = *watch.borrow();
606 if let Some(id) = id {
607 return id;
608 }
609 watch.next().await;
610 }
611 }
612 }
613
614 pub fn replica_id(&self) -> ReplicaId {
615 match &self.client_state {
616 ProjectClientState::Local { .. } => 0,
617 ProjectClientState::Remote { replica_id, .. } => *replica_id,
618 }
619 }
620
621 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
622 &self.collaborators
623 }
624
625 pub fn worktrees<'a>(
626 &'a self,
627 cx: &'a AppContext,
628 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
629 self.worktrees
630 .iter()
631 .filter_map(move |worktree| worktree.upgrade(cx))
632 }
633
634 pub fn visible_worktrees<'a>(
635 &'a self,
636 cx: &'a AppContext,
637 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
638 self.worktrees.iter().filter_map(|worktree| {
639 worktree.upgrade(cx).and_then(|worktree| {
640 if worktree.read(cx).is_visible() {
641 Some(worktree)
642 } else {
643 None
644 }
645 })
646 })
647 }
648
649 pub fn worktree_for_id(
650 &self,
651 id: WorktreeId,
652 cx: &AppContext,
653 ) -> Option<ModelHandle<Worktree>> {
654 self.worktrees(cx)
655 .find(|worktree| worktree.read(cx).id() == id)
656 }
657
658 pub fn worktree_for_entry(
659 &self,
660 entry_id: ProjectEntryId,
661 cx: &AppContext,
662 ) -> Option<ModelHandle<Worktree>> {
663 self.worktrees(cx)
664 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
665 }
666
667 pub fn worktree_id_for_entry(
668 &self,
669 entry_id: ProjectEntryId,
670 cx: &AppContext,
671 ) -> Option<WorktreeId> {
672 self.worktree_for_entry(entry_id, cx)
673 .map(|worktree| worktree.read(cx).id())
674 }
675
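    /// Makes a local project available to collaborators. Weak buffer and
    /// worktree handles are upgraded to strong ones so their state outlives
    /// local views, then each worktree is shared with the server.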
676 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
677 let rpc = self.client.clone();
678 cx.spawn(|this, mut cx| async move {
679 let project_id = this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local {
681 is_shared,
682 remote_id_rx,
683 ..
684 } = &mut this.client_state
685 {
686 *is_shared = true;
687
688 for open_buffer in this.opened_buffers.values_mut() {
689 match open_buffer {
690 OpenBuffer::Strong(_) => {}
691 OpenBuffer::Weak(buffer) => {
692 if let Some(buffer) = buffer.upgrade(cx) {
693 *open_buffer = OpenBuffer::Strong(buffer);
694 }
695 }
696 OpenBuffer::Loading(_) => unreachable!(),
697 }
698 }
699
700 for worktree_handle in this.worktrees.iter_mut() {
701 match worktree_handle {
702 WorktreeHandle::Strong(_) => {}
703 WorktreeHandle::Weak(worktree) => {
704 if let Some(worktree) = worktree.upgrade(cx) {
705 *worktree_handle = WorktreeHandle::Strong(worktree);
706 }
707 }
708 }
709 }
710
711 remote_id_rx
712 .borrow()
713 .ok_or_else(|| anyhow!("no project id"))
714 } else {
715 Err(anyhow!("can't share a remote project"))
716 }
717 })?;
718
719 rpc.request(proto::ShareProject { project_id }).await?;
720
721 let mut tasks = Vec::new();
722 this.update(&mut cx, |this, cx| {
723 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
724 worktree.update(cx, |worktree, cx| {
725 let worktree = worktree.as_local_mut().unwrap();
726 tasks.push(worktree.share(project_id, cx));
727 });
728 }
729 });
730 for task in tasks {
731 task.await?;
732 }
733 this.update(&mut cx, |_, cx| cx.notify());
734 Ok(())
735 })
736 }
737
738 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
739 let rpc = self.client.clone();
740
741 if let ProjectClientState::Local {
742 is_shared,
743 remote_id_rx,
744 ..
745 } = &mut self.client_state
746 {
747 if !*is_shared {
748 return;
749 }
750
751 *is_shared = false;
752 self.collaborators.clear();
753 self.shared_buffers.clear();
754 for worktree_handle in self.worktrees.iter_mut() {
755 if let WorktreeHandle::Strong(worktree) = worktree_handle {
756 let is_visible = worktree.update(cx, |worktree, _| {
757 worktree.as_local_mut().unwrap().unshare();
758 worktree.is_visible()
759 });
760 if !is_visible {
761 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
762 }
763 }
764 }
765
766 for open_buffer in self.opened_buffers.values_mut() {
767 match open_buffer {
768 OpenBuffer::Strong(buffer) => {
769 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
770 }
771 _ => {}
772 }
773 }
774
775 if let Some(project_id) = *remote_id_rx.borrow() {
776 rpc.send(proto::UnshareProject { project_id }).log_err();
777 }
778
779 cx.notify();
780 } else {
781 log::error!("attempted to unshare a remote project");
782 }
783 }
784
785 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
786 if let ProjectClientState::Remote {
787 sharing_has_stopped,
788 ..
789 } = &mut self.client_state
790 {
791 *sharing_has_stopped = true;
792 self.collaborators.clear();
793 cx.notify();
794 }
795 }
796
797 pub fn is_read_only(&self) -> bool {
798 match &self.client_state {
799 ProjectClientState::Local { .. } => false,
800 ProjectClientState::Remote {
801 sharing_has_stopped,
802 ..
803 } => *sharing_has_stopped,
804 }
805 }
806
807 pub fn is_local(&self) -> bool {
808 match &self.client_state {
809 ProjectClientState::Local { .. } => true,
810 ProjectClientState::Remote { .. } => false,
811 }
812 }
813
814 pub fn is_remote(&self) -> bool {
815 !self.is_local()
816 }
817
818 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
819 if self.is_remote() {
820 return Err(anyhow!("creating buffers as a guest is not supported yet"));
821 }
822
823 let buffer = cx.add_model(|cx| {
824 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
825 });
826 self.register_buffer(&buffer, cx)?;
827 Ok(buffer)
828 }
829
830 pub fn open_path(
831 &mut self,
832 path: impl Into<ProjectPath>,
833 cx: &mut ModelContext<Self>,
834 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
835 let task = self.open_buffer(path, cx);
836 cx.spawn_weak(|_, cx| async move {
837 let buffer = task.await?;
838 let project_entry_id = buffer
839 .read_with(&cx, |buffer, cx| {
840 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
841 })
842 .ok_or_else(|| anyhow!("no project entry"))?;
843 Ok((project_entry_id, buffer.into()))
844 })
845 }
846
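    /// Opens (or returns an already open) buffer for the given project path.
    /// Concurrent requests for the same path share a single load via the
    /// `loading_buffers` watch channel.
    ///
    /// A rough usage sketch (not compiled here; assumes a `ModelHandle<Project>`
    /// named `project`, a known `worktree_id`, an illustrative file path, and an
    /// async gpui context `cx`):
    ///
    /// ```ignore
    /// let project_path = ProjectPath {
    ///     worktree_id,
    ///     path: Path::new("src/main.rs").into(),
    /// };
    /// let buffer = project
    ///     .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
    ///     .await?;
    /// ```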
847 pub fn open_buffer(
848 &mut self,
849 path: impl Into<ProjectPath>,
850 cx: &mut ModelContext<Self>,
851 ) -> Task<Result<ModelHandle<Buffer>>> {
852 let project_path = path.into();
853 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
854 worktree
855 } else {
856 return Task::ready(Err(anyhow!("no such worktree")));
857 };
858
859 // If there is already a buffer for the given path, then return it.
860 let existing_buffer = self.get_open_buffer(&project_path, cx);
861 if let Some(existing_buffer) = existing_buffer {
862 return Task::ready(Ok(existing_buffer));
863 }
864
865 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
866 // If the given path is already being loaded, then wait for that existing
867 // task to complete and return the same buffer.
868 hash_map::Entry::Occupied(e) => e.get().clone(),
869
870 // Otherwise, record the fact that this path is now being loaded.
871 hash_map::Entry::Vacant(entry) => {
872 let (mut tx, rx) = postage::watch::channel();
873 entry.insert(rx.clone());
874
875 let load_buffer = if worktree.read(cx).is_local() {
876 self.open_local_buffer(&project_path.path, &worktree, cx)
877 } else {
878 self.open_remote_buffer(&project_path.path, &worktree, cx)
879 };
880
881 cx.spawn(move |this, mut cx| async move {
882 let load_result = load_buffer.await;
883 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
884 // Record the fact that the buffer is no longer loading.
885 this.loading_buffers.remove(&project_path);
886 let buffer = load_result.map_err(Arc::new)?;
887 Ok(buffer)
888 }));
889 })
890 .detach();
891 rx
892 }
893 };
894
895 cx.foreground().spawn(async move {
896 loop {
897 if let Some(result) = loading_watch.borrow().as_ref() {
898 match result {
899 Ok(buffer) => return Ok(buffer.clone()),
900 Err(error) => return Err(anyhow!("{}", error)),
901 }
902 }
903 loading_watch.next().await;
904 }
905 })
906 }
907
908 fn open_local_buffer(
909 &mut self,
910 path: &Arc<Path>,
911 worktree: &ModelHandle<Worktree>,
912 cx: &mut ModelContext<Self>,
913 ) -> Task<Result<ModelHandle<Buffer>>> {
914 let load_buffer = worktree.update(cx, |worktree, cx| {
915 let worktree = worktree.as_local_mut().unwrap();
916 worktree.load_buffer(path, cx)
917 });
918 cx.spawn(|this, mut cx| async move {
919 let buffer = load_buffer.await?;
920 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
921 Ok(buffer)
922 })
923 }
924
925 fn open_remote_buffer(
926 &mut self,
927 path: &Arc<Path>,
928 worktree: &ModelHandle<Worktree>,
929 cx: &mut ModelContext<Self>,
930 ) -> Task<Result<ModelHandle<Buffer>>> {
931 let rpc = self.client.clone();
932 let project_id = self.remote_id().unwrap();
933 let remote_worktree_id = worktree.read(cx).id();
934 let path = path.clone();
935 let path_string = path.to_string_lossy().to_string();
936 cx.spawn(|this, mut cx| async move {
937 let response = rpc
938 .request(proto::OpenBufferByPath {
939 project_id,
940 worktree_id: remote_worktree_id.to_proto(),
941 path: path_string,
942 })
943 .await?;
944 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
945 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
946 .await
947 })
948 }
949
950 fn open_local_buffer_via_lsp(
951 &mut self,
952 abs_path: lsp::Url,
953 lsp_adapter: Arc<dyn LspAdapter>,
954 lsp_server: Arc<LanguageServer>,
955 cx: &mut ModelContext<Self>,
956 ) -> Task<Result<ModelHandle<Buffer>>> {
957 cx.spawn(|this, mut cx| async move {
958 let abs_path = abs_path
959 .to_file_path()
960 .map_err(|_| anyhow!("can't convert URI to path"))?;
961 let (worktree, relative_path) = if let Some(result) =
962 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
963 {
964 result
965 } else {
966 let worktree = this
967 .update(&mut cx, |this, cx| {
968 this.create_local_worktree(&abs_path, false, cx)
969 })
970 .await?;
971 this.update(&mut cx, |this, cx| {
972 this.language_servers.insert(
973 (worktree.read(cx).id(), lsp_adapter.name()),
974 (lsp_adapter, lsp_server),
975 );
976 });
977 (worktree, PathBuf::new())
978 };
979
980 let project_path = ProjectPath {
981 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
982 path: relative_path.into(),
983 };
984 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
985 .await
986 })
987 }
988
989 pub fn open_buffer_by_id(
990 &mut self,
991 id: u64,
992 cx: &mut ModelContext<Self>,
993 ) -> Task<Result<ModelHandle<Buffer>>> {
994 if let Some(buffer) = self.buffer_for_id(id, cx) {
995 Task::ready(Ok(buffer))
996 } else if self.is_local() {
997 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
998 } else if let Some(project_id) = self.remote_id() {
999 let request = self
1000 .client
1001 .request(proto::OpenBufferById { project_id, id });
1002 cx.spawn(|this, mut cx| async move {
1003 let buffer = request
1004 .await?
1005 .buffer
1006 .ok_or_else(|| anyhow!("invalid buffer"))?;
1007 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1008 .await
1009 })
1010 } else {
1011 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1012 }
1013 }
1014
1015 pub fn save_buffer_as(
1016 &mut self,
1017 buffer: ModelHandle<Buffer>,
1018 abs_path: PathBuf,
1019 cx: &mut ModelContext<Project>,
1020 ) -> Task<Result<()>> {
1021 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1022 let old_path =
1023 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1024 cx.spawn(|this, mut cx| async move {
1025 if let Some(old_path) = old_path {
1026 this.update(&mut cx, |this, cx| {
1027 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1028 });
1029 }
1030 let (worktree, path) = worktree_task.await?;
1031 worktree
1032 .update(&mut cx, |worktree, cx| {
1033 worktree
1034 .as_local_mut()
1035 .unwrap()
1036 .save_buffer_as(buffer.clone(), path, cx)
1037 })
1038 .await?;
1039 this.update(&mut cx, |this, cx| {
1040 this.assign_language_to_buffer(&buffer, cx);
1041 this.register_buffer_with_language_server(&buffer, cx);
1042 });
1043 Ok(())
1044 })
1045 }
1046
1047 pub fn get_open_buffer(
1048 &mut self,
1049 path: &ProjectPath,
1050 cx: &mut ModelContext<Self>,
1051 ) -> Option<ModelHandle<Buffer>> {
1052 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1053 self.opened_buffers.values().find_map(|buffer| {
1054 let buffer = buffer.upgrade(cx)?;
1055 let file = File::from_dyn(buffer.read(cx).file())?;
1056 if file.worktree == worktree && file.path() == &path.path {
1057 Some(buffer)
1058 } else {
1059 None
1060 }
1061 })
1062 }
1063
1064 fn register_buffer(
1065 &mut self,
1066 buffer: &ModelHandle<Buffer>,
1067 cx: &mut ModelContext<Self>,
1068 ) -> Result<()> {
1069 let remote_id = buffer.read(cx).remote_id();
1070 let open_buffer = if self.is_remote() || self.is_shared() {
1071 OpenBuffer::Strong(buffer.clone())
1072 } else {
1073 OpenBuffer::Weak(buffer.downgrade())
1074 };
1075
1076 match self.opened_buffers.insert(remote_id, open_buffer) {
1077 None => {}
1078 Some(OpenBuffer::Loading(operations)) => {
1079 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1080 }
1081 Some(OpenBuffer::Weak(existing_handle)) => {
1082 if existing_handle.upgrade(cx).is_some() {
1083 Err(anyhow!(
1084 "already registered buffer with remote id {}",
1085 remote_id
1086 ))?
1087 }
1088 }
1089 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1090 "already registered buffer with remote id {}",
1091 remote_id
1092 ))?,
1093 }
1094 cx.subscribe(buffer, |this, buffer, event, cx| {
1095 this.on_buffer_event(buffer, event, cx);
1096 })
1097 .detach();
1098
1099 self.assign_language_to_buffer(buffer, cx);
1100 self.register_buffer_with_language_server(buffer, cx);
1101 cx.observe_release(buffer, |this, buffer, cx| {
1102 if let Some(file) = File::from_dyn(buffer.file()) {
1103 if file.is_local() {
1104 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1105 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1106 server
1107 .notify::<lsp::notification::DidCloseTextDocument>(
1108 lsp::DidCloseTextDocumentParams {
1109 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1110 },
1111 )
1112 .log_err();
1113 }
1114 }
1115 }
1116 })
1117 .detach();
1118
1119 Ok(())
1120 }
1121
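    /// Informs the appropriate language server about a newly opened local
    /// buffer (`textDocument/didOpen`), seeds its diagnostics from the
    /// worktree if any were reported before the buffer opened, and records an
    /// initial snapshot so that subsequent edits can be sent incrementally.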
1122 fn register_buffer_with_language_server(
1123 &mut self,
1124 buffer_handle: &ModelHandle<Buffer>,
1125 cx: &mut ModelContext<Self>,
1126 ) {
1127 let buffer = buffer_handle.read(cx);
1128 let buffer_id = buffer.remote_id();
1129 if let Some(file) = File::from_dyn(buffer.file()) {
1130 if file.is_local() {
1131 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1132 let initial_snapshot = buffer.text_snapshot();
1133 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1134
1135 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1136 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1137 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1138 .log_err();
1139 }
1140 }
1141
1142 if let Some((_, server)) = language_server {
1143 server
1144 .notify::<lsp::notification::DidOpenTextDocument>(
1145 lsp::DidOpenTextDocumentParams {
1146 text_document: lsp::TextDocumentItem::new(
1147 uri,
1148 Default::default(),
1149 0,
1150 initial_snapshot.text(),
1151 ),
                            },
1154 )
1155 .log_err();
1156 buffer_handle.update(cx, |buffer, cx| {
1157 buffer.set_completion_triggers(
1158 server
1159 .capabilities()
1160 .completion_provider
1161 .as_ref()
1162 .and_then(|provider| provider.trigger_characters.clone())
1163 .unwrap_or(Vec::new()),
1164 cx,
1165 )
1166 });
1167 self.buffer_snapshots
1168 .insert(buffer_id, vec![(0, initial_snapshot)]);
1169 }
1170 }
1171 }
1172 }
1173
1174 fn unregister_buffer_from_language_server(
1175 &mut self,
1176 buffer: &ModelHandle<Buffer>,
1177 old_path: PathBuf,
1178 cx: &mut ModelContext<Self>,
1179 ) {
1180 buffer.update(cx, |buffer, cx| {
1181 buffer.update_diagnostics(Default::default(), cx);
1182 self.buffer_snapshots.remove(&buffer.remote_id());
1183 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1184 language_server
1185 .notify::<lsp::notification::DidCloseTextDocument>(
1186 lsp::DidCloseTextDocumentParams {
1187 text_document: lsp::TextDocumentIdentifier::new(
1188 lsp::Url::from_file_path(old_path).unwrap(),
1189 ),
1190 },
1191 )
1192 .log_err();
1193 }
1194 });
1195 }
1196
1197 fn on_buffer_event(
1198 &mut self,
1199 buffer: ModelHandle<Buffer>,
1200 event: &BufferEvent,
1201 cx: &mut ModelContext<Self>,
1202 ) -> Option<()> {
1203 match event {
1204 BufferEvent::Operation(operation) => {
1205 let project_id = self.remote_id()?;
1206 let request = self.client.request(proto::UpdateBuffer {
1207 project_id,
1208 buffer_id: buffer.read(cx).remote_id(),
1209 operations: vec![language::proto::serialize_operation(&operation)],
1210 });
1211 cx.background().spawn(request).detach_and_log_err(cx);
1212 }
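            // Translate the edits made since the last snapshot we sent into
            // incremental `textDocument/didChange` content changes for the
            // buffer's language server.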
1213 BufferEvent::Edited { .. } => {
1214 let (_, language_server) = self
1215 .language_server_for_buffer(buffer.read(cx), cx)?
1216 .clone();
1217 let buffer = buffer.read(cx);
1218 let file = File::from_dyn(buffer.file())?;
1219 let abs_path = file.as_local()?.abs_path(cx);
1220 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1221 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1222 let (version, prev_snapshot) = buffer_snapshots.last()?;
1223 let next_snapshot = buffer.text_snapshot();
1224 let next_version = version + 1;
1225
1226 let content_changes = buffer
1227 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1228 .map(|edit| {
1229 let edit_start = edit.new.start.0;
1230 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1231 let new_text = next_snapshot
1232 .text_for_range(edit.new.start.1..edit.new.end.1)
1233 .collect();
1234 lsp::TextDocumentContentChangeEvent {
1235 range: Some(lsp::Range::new(
1236 point_to_lsp(edit_start),
1237 point_to_lsp(edit_end),
1238 )),
1239 range_length: None,
1240 text: new_text,
1241 }
1242 })
1243 .collect();
1244
1245 buffer_snapshots.push((next_version, next_snapshot));
1246
1247 language_server
1248 .notify::<lsp::notification::DidChangeTextDocument>(
1249 lsp::DidChangeTextDocumentParams {
1250 text_document: lsp::VersionedTextDocumentIdentifier::new(
1251 uri,
1252 next_version,
1253 ),
1254 content_changes,
1255 },
1256 )
1257 .log_err();
1258 }
1259 BufferEvent::Saved => {
1260 let file = File::from_dyn(buffer.read(cx).file())?;
1261 let worktree_id = file.worktree_id(cx);
1262 let abs_path = file.as_local()?.abs_path(cx);
1263 let text_document = lsp::TextDocumentIdentifier {
1264 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1265 };
1266
1267 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1268 server
1269 .notify::<lsp::notification::DidSaveTextDocument>(
1270 lsp::DidSaveTextDocumentParams {
1271 text_document: text_document.clone(),
1272 text: None,
1273 },
1274 )
1275 .log_err();
1276 }
1277 }
1278 _ => {}
1279 }
1280
1281 None
1282 }
1283
1284 fn language_servers_for_worktree(
1285 &self,
1286 worktree_id: WorktreeId,
1287 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1288 self.language_servers.iter().filter_map(
1289 move |((language_server_worktree_id, _), server)| {
1290 if *language_server_worktree_id == worktree_id {
1291 Some(server)
1292 } else {
1293 None
1294 }
1295 },
1296 )
1297 }
1298
1299 fn assign_language_to_buffer(
1300 &mut self,
1301 buffer: &ModelHandle<Buffer>,
1302 cx: &mut ModelContext<Self>,
1303 ) -> Option<()> {
1304 // If the buffer has a language, set it and start the language server if we haven't already.
1305 let full_path = buffer.read(cx).file()?.full_path(cx);
1306 let language = self.languages.select_language(&full_path)?;
1307 buffer.update(cx, |buffer, cx| {
1308 buffer.set_language(Some(language.clone()), cx);
1309 });
1310
1311 let file = File::from_dyn(buffer.read(cx).file())?;
1312 let worktree = file.worktree.read(cx).as_local()?;
1313 let worktree_id = worktree.id();
1314 let worktree_abs_path = worktree.abs_path().clone();
1315 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1316
1317 None
1318 }
1319
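    /// Starts the language server for the given language, at most once per
    /// worktree and adapter. Once the server finishes initializing, its
    /// notification and request handlers are wired up, collaborators are told
    /// about it, and every matching open buffer is registered with it.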
1320 fn start_language_server(
1321 &mut self,
1322 worktree_id: WorktreeId,
1323 worktree_path: Arc<Path>,
1324 language: Arc<Language>,
1325 cx: &mut ModelContext<Self>,
1326 ) {
1327 let adapter = if let Some(adapter) = language.lsp_adapter() {
1328 adapter
1329 } else {
1330 return;
1331 };
1332 let key = (worktree_id, adapter.name());
1333 self.started_language_servers
1334 .entry(key.clone())
1335 .or_insert_with(|| {
1336 let server_id = post_inc(&mut self.next_language_server_id);
1337 let language_server = self.languages.start_language_server(
1338 server_id,
1339 language.clone(),
1340 worktree_path,
1341 self.client.http_client(),
1342 cx,
1343 );
1344 cx.spawn_weak(|this, mut cx| async move {
1345 let language_server = language_server?.await.log_err()?;
1346 let language_server = language_server
1347 .initialize(adapter.initialization_options())
1348 .await
1349 .log_err()?;
1350 let this = this.upgrade(&cx)?;
1351 let disk_based_diagnostics_progress_token =
1352 adapter.disk_based_diagnostics_progress_token();
1353
1354 language_server
1355 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1356 let this = this.downgrade();
1357 let adapter = adapter.clone();
1358 move |params, mut cx| {
1359 if let Some(this) = this.upgrade(&cx) {
1360 this.update(&mut cx, |this, cx| {
1361 this.on_lsp_diagnostics_published(
1362 server_id,
1363 params,
1364 &adapter,
1365 disk_based_diagnostics_progress_token,
1366 cx,
1367 );
1368 });
1369 }
1370 }
1371 })
1372 .detach();
1373
1374 language_server
1375 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1376 let settings = this
1377 .read_with(&cx, |this, _| this.language_server_settings.clone());
1378 move |params, _| {
1379 let settings = settings.lock().clone();
1380 async move {
1381 Ok(params
1382 .items
1383 .into_iter()
1384 .map(|item| {
1385 if let Some(section) = &item.section {
1386 settings
1387 .get(section)
1388 .cloned()
1389 .unwrap_or(serde_json::Value::Null)
1390 } else {
1391 settings.clone()
1392 }
1393 })
1394 .collect())
1395 }
1396 }
1397 })
1398 .detach();
1399
1400 language_server
1401 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1402 let this = this.downgrade();
1403 let adapter = adapter.clone();
1404 let language_server = language_server.clone();
1405 move |params, cx| {
1406 Self::on_lsp_workspace_edit(
1407 this,
1408 params,
1409 server_id,
1410 adapter.clone(),
1411 language_server.clone(),
1412 cx,
1413 )
1414 }
1415 })
1416 .detach();
1417
1418 language_server
1419 .on_notification::<lsp::notification::Progress, _>({
1420 let this = this.downgrade();
1421 move |params, mut cx| {
1422 if let Some(this) = this.upgrade(&cx) {
1423 this.update(&mut cx, |this, cx| {
1424 this.on_lsp_progress(
1425 params,
1426 server_id,
1427 disk_based_diagnostics_progress_token,
1428 cx,
1429 );
1430 });
1431 }
1432 }
1433 })
1434 .detach();
1435
1436 this.update(&mut cx, |this, cx| {
1437 this.language_servers
1438 .insert(key.clone(), (adapter, language_server.clone()));
1439 this.language_server_statuses.insert(
1440 server_id,
1441 LanguageServerStatus {
1442 name: language_server.name().to_string(),
1443 pending_work: Default::default(),
1444 pending_diagnostic_updates: 0,
1445 },
1446 );
1447 language_server
1448 .notify::<lsp::notification::DidChangeConfiguration>(
1449 lsp::DidChangeConfigurationParams {
1450 settings: this.language_server_settings.lock().clone(),
1451 },
1452 )
1453 .ok();
1454
1455 if let Some(project_id) = this.remote_id() {
1456 this.client
1457 .send(proto::StartLanguageServer {
1458 project_id,
1459 server: Some(proto::LanguageServer {
1460 id: server_id as u64,
1461 name: language_server.name().to_string(),
1462 }),
1463 })
1464 .log_err();
1465 }
1466
1467 // Tell the language server about every open buffer in the worktree that matches the language.
1468 for buffer in this.opened_buffers.values() {
1469 if let Some(buffer_handle) = buffer.upgrade(cx) {
1470 let buffer = buffer_handle.read(cx);
1471 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1472 file
1473 } else {
1474 continue;
1475 };
1476 let language = if let Some(language) = buffer.language() {
1477 language
1478 } else {
1479 continue;
1480 };
1481 if file.worktree.read(cx).id() != key.0
1482 || language.lsp_adapter().map(|a| a.name())
1483 != Some(key.1.clone())
1484 {
1485 continue;
1486 }
1487
1488 let file = file.as_local()?;
1489 let versions = this
1490 .buffer_snapshots
1491 .entry(buffer.remote_id())
1492 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1493 let (version, initial_snapshot) = versions.last().unwrap();
1494 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1495 language_server
1496 .notify::<lsp::notification::DidOpenTextDocument>(
1497 lsp::DidOpenTextDocumentParams {
1498 text_document: lsp::TextDocumentItem::new(
1499 uri,
1500 Default::default(),
1501 *version,
1502 initial_snapshot.text(),
1503 ),
1504 },
1505 )
1506 .log_err()?;
1507 buffer_handle.update(cx, |buffer, cx| {
1508 buffer.set_completion_triggers(
1509 language_server
1510 .capabilities()
1511 .completion_provider
1512 .as_ref()
1513 .and_then(|provider| {
1514 provider.trigger_characters.clone()
1515 })
1516 .unwrap_or(Vec::new()),
1517 cx,
1518 )
1519 });
1520 }
1521 }
1522
1523 cx.notify();
1524 Some(())
1525 });
1526
1527 Some(language_server)
1528 })
1529 });
1530 }
1531
1532 pub fn restart_language_servers_for_buffers(
1533 &mut self,
1534 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1535 cx: &mut ModelContext<Self>,
1536 ) -> Option<()> {
1537 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1538 .into_iter()
1539 .filter_map(|buffer| {
1540 let file = File::from_dyn(buffer.read(cx).file())?;
1541 let worktree = file.worktree.read(cx).as_local()?;
1542 let worktree_id = worktree.id();
1543 let worktree_abs_path = worktree.abs_path().clone();
1544 let full_path = file.full_path(cx);
1545 Some((worktree_id, worktree_abs_path, full_path))
1546 })
1547 .collect();
1548 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1549 let language = self.languages.select_language(&full_path)?;
1550 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1551 }
1552
1553 None
1554 }
1555
1556 fn restart_language_server(
1557 &mut self,
1558 worktree_id: WorktreeId,
1559 worktree_path: Arc<Path>,
1560 language: Arc<Language>,
1561 cx: &mut ModelContext<Self>,
1562 ) {
1563 let adapter = if let Some(adapter) = language.lsp_adapter() {
1564 adapter
1565 } else {
1566 return;
1567 };
1568 let key = (worktree_id, adapter.name());
1569 let server_to_shutdown = self.language_servers.remove(&key);
1570 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1574 cx.spawn_weak(|this, mut cx| async move {
1575 if let Some(this) = this.upgrade(&cx) {
1576 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1577 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1578 shutdown_task.await;
1579 }
1580 }
1581
1582 this.update(&mut cx, |this, cx| {
1583 this.start_language_server(worktree_id, worktree_path, language, cx);
1584 });
1585 }
1586 })
1587 .detach();
1588 }
1589
1590 fn on_lsp_diagnostics_published(
1591 &mut self,
1592 server_id: usize,
1593 mut params: lsp::PublishDiagnosticsParams,
1594 adapter: &Arc<dyn LspAdapter>,
1595 disk_based_diagnostics_progress_token: Option<&str>,
1596 cx: &mut ModelContext<Self>,
1597 ) {
1598 adapter.process_diagnostics(&mut params);
1599 if disk_based_diagnostics_progress_token.is_none() {
1600 self.disk_based_diagnostics_started(cx);
1601 self.broadcast_language_server_update(
1602 server_id,
1603 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1604 proto::LspDiskBasedDiagnosticsUpdating {},
1605 ),
1606 );
1607 }
1608 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1609 .log_err();
1610 if disk_based_diagnostics_progress_token.is_none() {
1611 self.disk_based_diagnostics_finished(cx);
1612 self.broadcast_language_server_update(
1613 server_id,
1614 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1615 proto::LspDiskBasedDiagnosticsUpdated {},
1616 ),
1617 );
1618 }
1619 }
1620
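    /// Handles `$/progress` notifications. Progress carrying the adapter's
    /// disk-based-diagnostics token is tracked with a counter so start/finish
    /// events fire once per batch; other tokens are surfaced as pending work
    /// on the server's status.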
1621 fn on_lsp_progress(
1622 &mut self,
1623 progress: lsp::ProgressParams,
1624 server_id: usize,
1625 disk_based_diagnostics_progress_token: Option<&str>,
1626 cx: &mut ModelContext<Self>,
1627 ) {
1628 let token = match progress.token {
1629 lsp::NumberOrString::String(token) => token,
1630 lsp::NumberOrString::Number(token) => {
1631 log::info!("skipping numeric progress token {}", token);
1632 return;
1633 }
1634 };
1635
1636 match progress.value {
1637 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1638 lsp::WorkDoneProgress::Begin(_) => {
1639 let language_server_status =
1640 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1641 status
1642 } else {
1643 return;
1644 };
1645
1646 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1647 language_server_status.pending_diagnostic_updates += 1;
1648 if language_server_status.pending_diagnostic_updates == 1 {
1649 self.disk_based_diagnostics_started(cx);
1650 self.broadcast_language_server_update(
1651 server_id,
1652 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1653 proto::LspDiskBasedDiagnosticsUpdating {},
1654 ),
1655 );
1656 }
1657 } else {
1658 self.on_lsp_work_start(server_id, token.clone(), cx);
1659 self.broadcast_language_server_update(
1660 server_id,
1661 proto::update_language_server::Variant::WorkStart(
1662 proto::LspWorkStart { token },
1663 ),
1664 );
1665 }
1666 }
1667 lsp::WorkDoneProgress::Report(report) => {
1668 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1669 self.on_lsp_work_progress(
1670 server_id,
1671 token.clone(),
1672 LanguageServerProgress {
1673 message: report.message.clone(),
1674 percentage: report.percentage.map(|p| p as usize),
1675 last_update_at: Instant::now(),
1676 },
1677 cx,
1678 );
1679 self.broadcast_language_server_update(
1680 server_id,
1681 proto::update_language_server::Variant::WorkProgress(
1682 proto::LspWorkProgress {
1683 token,
1684 message: report.message,
1685 percentage: report.percentage.map(|p| p as u32),
1686 },
1687 ),
1688 );
1689 }
1690 }
1691 lsp::WorkDoneProgress::End(_) => {
1692 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1693 let language_server_status = if let Some(status) =
1694 self.language_server_statuses.get_mut(&server_id)
1695 {
1696 status
1697 } else {
1698 return;
1699 };
1700
1701 language_server_status.pending_diagnostic_updates -= 1;
1702 if language_server_status.pending_diagnostic_updates == 0 {
1703 self.disk_based_diagnostics_finished(cx);
1704 self.broadcast_language_server_update(
1705 server_id,
1706 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1707 proto::LspDiskBasedDiagnosticsUpdated {},
1708 ),
1709 );
1710 }
1711 } else {
1712 self.on_lsp_work_end(server_id, token.clone(), cx);
1713 self.broadcast_language_server_update(
1714 server_id,
1715 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1716 token,
1717 }),
1718 );
1719 }
1720 }
1721 },
1722 }
1723 }
1724
1725 fn on_lsp_work_start(
1726 &mut self,
1727 language_server_id: usize,
1728 token: String,
1729 cx: &mut ModelContext<Self>,
1730 ) {
1731 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1732 status.pending_work.insert(
1733 token,
1734 LanguageServerProgress {
1735 message: None,
1736 percentage: None,
1737 last_update_at: Instant::now(),
1738 },
1739 );
1740 cx.notify();
1741 }
1742 }
1743
1744 fn on_lsp_work_progress(
1745 &mut self,
1746 language_server_id: usize,
1747 token: String,
1748 progress: LanguageServerProgress,
1749 cx: &mut ModelContext<Self>,
1750 ) {
1751 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1752 status.pending_work.insert(token, progress);
1753 cx.notify();
1754 }
1755 }
1756
1757 fn on_lsp_work_end(
1758 &mut self,
1759 language_server_id: usize,
1760 token: String,
1761 cx: &mut ModelContext<Self>,
1762 ) {
1763 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1764 status.pending_work.remove(&token);
1765 cx.notify();
1766 }
1767 }
1768
1769 async fn on_lsp_workspace_edit(
1770 this: WeakModelHandle<Self>,
1771 params: lsp::ApplyWorkspaceEditParams,
1772 server_id: usize,
1773 adapter: Arc<dyn LspAdapter>,
1774 language_server: Arc<LanguageServer>,
1775 mut cx: AsyncAppContext,
1776 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1777 let this = this
1778 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1780 let transaction = Self::deserialize_workspace_edit(
1781 this.clone(),
1782 params.edit,
1783 true,
1784 adapter.clone(),
1785 language_server.clone(),
1786 &mut cx,
1787 )
1788 .await
1789 .log_err();
1790 this.update(&mut cx, |this, _| {
1791 if let Some(transaction) = transaction {
1792 this.last_workspace_edits_by_language_server
1793 .insert(server_id, transaction);
1794 }
1795 });
1796 Ok(lsp::ApplyWorkspaceEditResponse {
1797 applied: true,
1798 failed_change: None,
1799 failure_reason: None,
1800 })
1801 }
1802
1803 fn broadcast_language_server_update(
1804 &self,
1805 language_server_id: usize,
1806 event: proto::update_language_server::Variant,
1807 ) {
1808 if let Some(project_id) = self.remote_id() {
1809 self.client
1810 .send(proto::UpdateLanguageServer {
1811 project_id,
1812 language_server_id: language_server_id as u64,
1813 variant: Some(event),
1814 })
1815 .log_err();
1816 }
1817 }
1818
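    /// Replaces the workspace settings sent to language servers and notifies
    /// every running server via `workspace/didChangeConfiguration`.
    ///
    /// A hedged sketch of how this might be called; the settings shape is
    /// defined by each individual language server, and the keys below are
    /// purely illustrative:
    ///
    /// ```ignore
    /// project.update(cx, |project, _| {
    ///     project.set_language_server_settings(serde_json::json!({
    ///         "rust-analyzer": { "checkOnSave": { "command": "clippy" } }
    ///     }));
    /// });
    /// ```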
1819 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1820 for (_, server) in self.language_servers.values() {
1821 server
1822 .notify::<lsp::notification::DidChangeConfiguration>(
1823 lsp::DidChangeConfigurationParams {
1824 settings: settings.clone(),
1825 },
1826 )
1827 .ok();
1828 }
1829 *self.language_server_settings.lock() = settings;
1830 }
1831
1832 pub fn language_server_statuses(
1833 &self,
1834 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1835 self.language_server_statuses.values()
1836 }
1837
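    /// Converts an LSP `textDocument/publishDiagnostics` payload into
    /// `DiagnosticEntry` groups: each primary diagnostic gets a group id, and
    /// related-information entries that point back at a primary are attached
    /// to that group as supporting diagnostics.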
1838 pub fn update_diagnostics(
1839 &mut self,
1840 params: lsp::PublishDiagnosticsParams,
1841 disk_based_sources: &[&str],
1842 cx: &mut ModelContext<Self>,
1843 ) -> Result<()> {
1844 let abs_path = params
1845 .uri
1846 .to_file_path()
1847 .map_err(|_| anyhow!("URI is not a file"))?;
1848 let mut next_group_id = 0;
1849 let mut diagnostics = Vec::default();
1850 let mut primary_diagnostic_group_ids = HashMap::default();
1851 let mut sources_by_group_id = HashMap::default();
1852 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1854 let source = diagnostic.source.as_ref();
1855 let code = diagnostic.code.as_ref().map(|code| match code {
1856 lsp::NumberOrString::Number(code) => code.to_string(),
1857 lsp::NumberOrString::String(code) => code.clone(),
1858 });
1859 let range = range_from_lsp(diagnostic.range);
1860 let is_supporting = diagnostic
1861 .related_information
1862 .as_ref()
1863 .map_or(false, |infos| {
1864 infos.iter().any(|info| {
1865 primary_diagnostic_group_ids.contains_key(&(
1866 source,
1867 code.clone(),
1868 range_from_lsp(info.location.range),
1869 ))
1870 })
1871 });
1872
1873 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1874 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1875 });
1876
1877 if is_supporting {
1878 supporting_diagnostics.insert(
1879 (source, code.clone(), range),
1880 (diagnostic.severity, is_unnecessary),
1881 );
1882 } else {
1883 let group_id = post_inc(&mut next_group_id);
1884 let is_disk_based = source.map_or(false, |source| {
1885 disk_based_sources.contains(&source.as_str())
1886 });
1887
1888 sources_by_group_id.insert(group_id, source);
1889 primary_diagnostic_group_ids
1890 .insert((source, code.clone(), range.clone()), group_id);
1891
1892 diagnostics.push(DiagnosticEntry {
1893 range,
1894 diagnostic: Diagnostic {
1895 code: code.clone(),
1896 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1897 message: diagnostic.message.clone(),
1898 group_id,
1899 is_primary: true,
1900 is_valid: true,
1901 is_disk_based,
1902 is_unnecessary,
1903 },
1904 });
1905 if let Some(infos) = &diagnostic.related_information {
1906 for info in infos {
1907 if info.location.uri == params.uri && !info.message.is_empty() {
1908 let range = range_from_lsp(info.location.range);
1909 diagnostics.push(DiagnosticEntry {
1910 range,
1911 diagnostic: Diagnostic {
1912 code: code.clone(),
1913 severity: DiagnosticSeverity::INFORMATION,
1914 message: info.message.clone(),
1915 group_id,
1916 is_primary: false,
1917 is_valid: true,
1918 is_disk_based,
1919 is_unnecessary: false,
1920 },
1921 });
1922 }
1923 }
1924 }
1925 }
1926 }
1927
1928 for entry in &mut diagnostics {
1929 let diagnostic = &mut entry.diagnostic;
1930 if !diagnostic.is_primary {
1931 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1932 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1933 source,
1934 diagnostic.code.clone(),
1935 entry.range.clone(),
1936 )) {
1937 if let Some(severity) = severity {
1938 diagnostic.severity = severity;
1939 }
1940 diagnostic.is_unnecessary = is_unnecessary;
1941 }
1942 }
1943 }
1944
1945 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1946 Ok(())
1947 }
1948
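    // Routes diagnostics for an absolute path to the worktree that owns it: updates
    // any open buffer for the path, records the diagnostics on the local worktree,
    // and emits `Event::DiagnosticsUpdated`. Diagnostics for invisible worktrees are
    // ignored.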
1949 pub fn update_diagnostic_entries(
1950 &mut self,
1951 abs_path: PathBuf,
1952 version: Option<i32>,
1953 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1954 cx: &mut ModelContext<Project>,
1955 ) -> Result<(), anyhow::Error> {
1956 let (worktree, relative_path) = self
1957 .find_local_worktree(&abs_path, cx)
1958 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1959 if !worktree.read(cx).is_visible() {
1960 return Ok(());
1961 }
1962
1963 let project_path = ProjectPath {
1964 worktree_id: worktree.read(cx).id(),
1965 path: relative_path.into(),
1966 };
1967
1968 for buffer in self.opened_buffers.values() {
1969 if let Some(buffer) = buffer.upgrade(cx) {
1970 if buffer
1971 .read(cx)
1972 .file()
1973 .map_or(false, |file| *file.path() == project_path.path)
1974 {
1975 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1976 break;
1977 }
1978 }
1979 }
1980 worktree.update(cx, |worktree, cx| {
1981 worktree
1982 .as_local_mut()
1983 .ok_or_else(|| anyhow!("not a local worktree"))?
1984 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1985 })?;
1986 cx.emit(Event::DiagnosticsUpdated(project_path));
1987 Ok(())
1988 }
1989
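    // Applies diagnostics to a single open buffer: sorts them, maps the ranges of
    // disk-based diagnostics through any edits made since the last save, clips all
    // ranges to the buffer snapshot matching the reported LSP version (expanding
    // empty ranges by one character), and installs the result as a `DiagnosticSet`.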
1990 fn update_buffer_diagnostics(
1991 &mut self,
1992 buffer: &ModelHandle<Buffer>,
1993 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1994 version: Option<i32>,
1995 cx: &mut ModelContext<Self>,
1996 ) -> Result<()> {
1997 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1998 Ordering::Equal
1999 .then_with(|| b.is_primary.cmp(&a.is_primary))
2000 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2001 .then_with(|| a.severity.cmp(&b.severity))
2002 .then_with(|| a.message.cmp(&b.message))
2003 }
2004
2005 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2006
2007 diagnostics.sort_unstable_by(|a, b| {
2008 Ordering::Equal
2009 .then_with(|| a.range.start.cmp(&b.range.start))
2010 .then_with(|| b.range.end.cmp(&a.range.end))
2011 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2012 });
2013
2014 let mut sanitized_diagnostics = Vec::new();
2015 let edits_since_save = Patch::new(
2016 snapshot
2017 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2018 .collect(),
2019 );
2020 for entry in diagnostics {
2021 let start;
2022 let end;
2023 if entry.diagnostic.is_disk_based {
2024 // Some diagnostics are based on files on disk instead of buffers'
2025 // current contents. Adjust these diagnostics' ranges to reflect
2026 // any unsaved edits.
2027 start = edits_since_save.old_to_new(entry.range.start);
2028 end = edits_since_save.old_to_new(entry.range.end);
2029 } else {
2030 start = entry.range.start;
2031 end = entry.range.end;
2032 }
2033
2034 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2035 ..snapshot.clip_point_utf16(end, Bias::Right);
2036
2037 // Expand empty ranges by one character
2038 if range.start == range.end {
2039 range.end.column += 1;
2040 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2041 if range.start == range.end && range.end.column > 0 {
2042 range.start.column -= 1;
2043 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2044 }
2045 }
2046
2047 sanitized_diagnostics.push(DiagnosticEntry {
2048 range,
2049 diagnostic: entry.diagnostic,
2050 });
2051 }
2052 drop(edits_since_save);
2053
2054 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2055 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2056 Ok(())
2057 }
2058
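    // Reloads the given buffers from disk if they are dirty. Remote buffers are
    // reloaded by the host via a `ReloadBuffers` request, local buffers directly,
    // and the resulting transactions are collected into a `ProjectTransaction`.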
2059 pub fn reload_buffers(
2060 &self,
2061 buffers: HashSet<ModelHandle<Buffer>>,
2062 push_to_history: bool,
2063 cx: &mut ModelContext<Self>,
2064 ) -> Task<Result<ProjectTransaction>> {
2065 let mut local_buffers = Vec::new();
2066 let mut remote_buffers = None;
2067 for buffer_handle in buffers {
2068 let buffer = buffer_handle.read(cx);
2069 if buffer.is_dirty() {
2070 if let Some(file) = File::from_dyn(buffer.file()) {
2071 if file.is_local() {
2072 local_buffers.push(buffer_handle);
2073 } else {
2074 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2075 }
2076 }
2077 }
2078 }
2079
2080 let remote_buffers = self.remote_id().zip(remote_buffers);
2081 let client = self.client.clone();
2082
2083 cx.spawn(|this, mut cx| async move {
2084 let mut project_transaction = ProjectTransaction::default();
2085
2086 if let Some((project_id, remote_buffers)) = remote_buffers {
2087 let response = client
2088 .request(proto::ReloadBuffers {
2089 project_id,
2090 buffer_ids: remote_buffers
2091 .iter()
2092 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2093 .collect(),
2094 })
2095 .await?
2096 .transaction
2097 .ok_or_else(|| anyhow!("missing transaction"))?;
2098 project_transaction = this
2099 .update(&mut cx, |this, cx| {
2100 this.deserialize_project_transaction(response, push_to_history, cx)
2101 })
2102 .await?;
2103 }
2104
2105 for buffer in local_buffers {
2106 let transaction = buffer
2107 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2108 .await?;
2109 buffer.update(&mut cx, |buffer, cx| {
2110 if let Some(transaction) = transaction {
2111 if !push_to_history {
2112 buffer.forget_transaction(transaction.id);
2113 }
2114 project_transaction.0.insert(cx.handle(), transaction);
2115 }
2116 });
2117 }
2118
2119 Ok(project_transaction)
2120 })
2121 }
2122
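    // Formats the given buffers. Local buffers are formatted through their language
    // server, preferring `textDocument/formatting` and falling back to
    // `textDocument/rangeFormatting` over the whole buffer; buffers whose server
    // supports neither are skipped. Remote buffers are formatted by the host via a
    // `FormatBuffers` request.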
2123 pub fn format(
2124 &self,
2125 buffers: HashSet<ModelHandle<Buffer>>,
2126 push_to_history: bool,
2127 cx: &mut ModelContext<Project>,
2128 ) -> Task<Result<ProjectTransaction>> {
2129 let mut local_buffers = Vec::new();
2130 let mut remote_buffers = None;
2131 for buffer_handle in buffers {
2132 let buffer = buffer_handle.read(cx);
2133 if let Some(file) = File::from_dyn(buffer.file()) {
2134 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2135 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2136 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2137 }
2138 } else {
2139 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2140 }
2141 } else {
2142 return Task::ready(Ok(Default::default()));
2143 }
2144 }
2145
2146 let remote_buffers = self.remote_id().zip(remote_buffers);
2147 let client = self.client.clone();
2148
2149 cx.spawn(|this, mut cx| async move {
2150 let mut project_transaction = ProjectTransaction::default();
2151
2152 if let Some((project_id, remote_buffers)) = remote_buffers {
2153 let response = client
2154 .request(proto::FormatBuffers {
2155 project_id,
2156 buffer_ids: remote_buffers
2157 .iter()
2158 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2159 .collect(),
2160 })
2161 .await?
2162 .transaction
2163 .ok_or_else(|| anyhow!("missing transaction"))?;
2164 project_transaction = this
2165 .update(&mut cx, |this, cx| {
2166 this.deserialize_project_transaction(response, push_to_history, cx)
2167 })
2168 .await?;
2169 }
2170
2171 for (buffer, buffer_abs_path, language_server) in local_buffers {
2172 let text_document = lsp::TextDocumentIdentifier::new(
2173 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2174 );
2175 let capabilities = &language_server.capabilities();
2176 let lsp_edits = if capabilities
2177 .document_formatting_provider
2178 .as_ref()
2179 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2180 {
2181 language_server
2182 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2183 text_document,
2184 options: lsp::FormattingOptions {
2185 tab_size: 4,
2186 insert_spaces: true,
2187 insert_final_newline: Some(true),
2188 ..Default::default()
2189 },
2190 work_done_progress_params: Default::default(),
2191 })
2192 .await?
2193 } else if capabilities
2194 .document_range_formatting_provider
2195 .as_ref()
2196 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2197 {
2198 let buffer_start = lsp::Position::new(0, 0);
2199 let buffer_end =
2200 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2201 language_server
2202 .request::<lsp::request::RangeFormatting>(
2203 lsp::DocumentRangeFormattingParams {
2204 text_document,
2205 range: lsp::Range::new(buffer_start, buffer_end),
2206 options: lsp::FormattingOptions {
2207 tab_size: 4,
2208 insert_spaces: true,
2209 insert_final_newline: Some(true),
2210 ..Default::default()
2211 },
2212 work_done_progress_params: Default::default(),
2213 },
2214 )
2215 .await?
2216 } else {
2217 continue;
2218 };
2219
2220 if let Some(lsp_edits) = lsp_edits {
2221 let edits = this
2222 .update(&mut cx, |this, cx| {
2223 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2224 })
2225 .await?;
2226 buffer.update(&mut cx, |buffer, cx| {
2227 buffer.finalize_last_transaction();
2228 buffer.start_transaction();
2229 for (range, text) in edits {
2230 buffer.edit([range], text, cx);
2231 }
2232 if buffer.end_transaction(cx).is_some() {
2233 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2234 if !push_to_history {
2235 buffer.forget_transaction(transaction.id);
2236 }
2237 project_transaction.0.insert(cx.handle(), transaction);
2238 }
2239 });
2240 }
2241 }
2242
2243 Ok(project_transaction)
2244 })
2245 }
2246
2247 pub fn definition<T: ToPointUtf16>(
2248 &self,
2249 buffer: &ModelHandle<Buffer>,
2250 position: T,
2251 cx: &mut ModelContext<Self>,
2252 ) -> Task<Result<Vec<Location>>> {
2253 let position = position.to_point_utf16(buffer.read(cx));
2254 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2255 }
2256
2257 pub fn references<T: ToPointUtf16>(
2258 &self,
2259 buffer: &ModelHandle<Buffer>,
2260 position: T,
2261 cx: &mut ModelContext<Self>,
2262 ) -> Task<Result<Vec<Location>>> {
2263 let position = position.to_point_utf16(buffer.read(cx));
2264 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2265 }
2266
2267 pub fn document_highlights<T: ToPointUtf16>(
2268 &self,
2269 buffer: &ModelHandle<Buffer>,
2270 position: T,
2271 cx: &mut ModelContext<Self>,
2272 ) -> Task<Result<Vec<DocumentHighlight>>> {
2273 let position = position.to_point_utf16(buffer.read(cx));
2274
2275 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2276 }
2277
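    // Performs a workspace-wide symbol search. Locally this fans out a
    // `workspace/symbol` request to each distinct language server and converts the
    // results into `Symbol`s; for remote projects the query is forwarded to the host
    // via `GetProjectSymbols`.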
2278 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2279 if self.is_local() {
2280 let mut language_servers = HashMap::default();
2281 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2282 if let Some(worktree) = self
2283 .worktree_for_id(*worktree_id, cx)
2284 .and_then(|worktree| worktree.read(cx).as_local())
2285 {
2286 language_servers
2287 .entry(Arc::as_ptr(language_server))
2288 .or_insert((
2289 lsp_adapter.clone(),
2290 language_server.clone(),
2291 *worktree_id,
2292 worktree.abs_path().clone(),
2293 ));
2294 }
2295 }
2296
2297 let mut requests = Vec::new();
2298 for (_, language_server, _, _) in language_servers.values() {
2299 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2300 lsp::WorkspaceSymbolParams {
2301 query: query.to_string(),
2302 ..Default::default()
2303 },
2304 ));
2305 }
2306
2307 cx.spawn_weak(|this, cx| async move {
2308 let responses = futures::future::try_join_all(requests).await?;
2309
2310 let mut symbols = Vec::new();
2311 if let Some(this) = this.upgrade(&cx) {
2312 this.read_with(&cx, |this, cx| {
2313 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2314 language_servers.into_values().zip(responses)
2315 {
2316 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2317 |lsp_symbol| {
2318 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2319 let mut worktree_id = source_worktree_id;
2320 let path;
2321 if let Some((worktree, rel_path)) =
2322 this.find_local_worktree(&abs_path, cx)
2323 {
2324 worktree_id = worktree.read(cx).id();
2325 path = rel_path;
2326 } else {
2327 path = relativize_path(&worktree_abs_path, &abs_path);
2328 }
2329
2330 let label = this
2331 .languages
2332 .select_language(&path)
2333 .and_then(|language| {
2334 language
2335 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2336 })
2337 .unwrap_or_else(|| {
2338 CodeLabel::plain(lsp_symbol.name.clone(), None)
2339 });
2340 let signature = this.symbol_signature(worktree_id, &path);
2341
2342 Some(Symbol {
2343 source_worktree_id,
2344 worktree_id,
2345 language_server_name: adapter.name(),
2346 name: lsp_symbol.name,
2347 kind: lsp_symbol.kind,
2348 label,
2349 path,
2350 range: range_from_lsp(lsp_symbol.location.range),
2351 signature,
2352 })
2353 },
2354 ));
2355 }
2356 })
2357 }
2358
2359 Ok(symbols)
2360 })
2361 } else if let Some(project_id) = self.remote_id() {
2362 let request = self.client.request(proto::GetProjectSymbols {
2363 project_id,
2364 query: query.to_string(),
2365 });
2366 cx.spawn_weak(|this, cx| async move {
2367 let response = request.await?;
2368 let mut symbols = Vec::new();
2369 if let Some(this) = this.upgrade(&cx) {
2370 this.read_with(&cx, |this, _| {
2371 symbols.extend(
2372 response
2373 .symbols
2374 .into_iter()
2375 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2376 );
2377 })
2378 }
2379 Ok(symbols)
2380 })
2381 } else {
2382 Task::ready(Ok(Default::default()))
2383 }
2384 }
2385
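    // Opens the buffer containing the given symbol, either through the language
    // server that produced it or, for remote projects, via an `OpenBufferForSymbol`
    // request to the host.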
2386 pub fn open_buffer_for_symbol(
2387 &mut self,
2388 symbol: &Symbol,
2389 cx: &mut ModelContext<Self>,
2390 ) -> Task<Result<ModelHandle<Buffer>>> {
2391 if self.is_local() {
2392 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2393 symbol.source_worktree_id,
2394 symbol.language_server_name.clone(),
2395 )) {
2396 server.clone()
2397 } else {
2398 return Task::ready(Err(anyhow!(
2399 "language server for worktree and language not found"
2400 )));
2401 };
2402
2403 let worktree_abs_path = if let Some(worktree_abs_path) = self
2404 .worktree_for_id(symbol.worktree_id, cx)
2405 .and_then(|worktree| worktree.read(cx).as_local())
2406 .map(|local_worktree| local_worktree.abs_path())
2407 {
2408 worktree_abs_path
2409 } else {
2410 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2411 };
2412 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2413 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2414 uri
2415 } else {
2416 return Task::ready(Err(anyhow!("invalid symbol path")));
2417 };
2418
2419 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2420 } else if let Some(project_id) = self.remote_id() {
2421 let request = self.client.request(proto::OpenBufferForSymbol {
2422 project_id,
2423 symbol: Some(serialize_symbol(symbol)),
2424 });
2425 cx.spawn(|this, mut cx| async move {
2426 let response = request.await?;
2427 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2428 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2429 .await
2430 })
2431 } else {
2432 Task::ready(Err(anyhow!("project does not have a remote id")))
2433 }
2434 }
2435
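    // Requests completions at the given position. Local buffers query their language
    // server and convert the LSP items into anchored `Completion`s; remote buffers
    // ask the host via `GetCompletions`, waiting for the buffer to reach the reported
    // version before deserializing the results.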
2436 pub fn completions<T: ToPointUtf16>(
2437 &self,
2438 source_buffer_handle: &ModelHandle<Buffer>,
2439 position: T,
2440 cx: &mut ModelContext<Self>,
2441 ) -> Task<Result<Vec<Completion>>> {
2442 let source_buffer_handle = source_buffer_handle.clone();
2443 let source_buffer = source_buffer_handle.read(cx);
2444 let buffer_id = source_buffer.remote_id();
2445 let language = source_buffer.language().cloned();
2446 let worktree;
2447 let buffer_abs_path;
2448 if let Some(file) = File::from_dyn(source_buffer.file()) {
2449 worktree = file.worktree.clone();
2450 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2451 } else {
2452 return Task::ready(Ok(Default::default()));
2453 };
2454
2455 let position = position.to_point_utf16(source_buffer);
2456 let anchor = source_buffer.anchor_after(position);
2457
2458 if worktree.read(cx).as_local().is_some() {
2459 let buffer_abs_path = buffer_abs_path.unwrap();
2460 let (_, lang_server) =
2461 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2462 server.clone()
2463 } else {
2464 return Task::ready(Ok(Default::default()));
2465 };
2466
2467 cx.spawn(|_, cx| async move {
2468 let completions = lang_server
2469 .request::<lsp::request::Completion>(lsp::CompletionParams {
2470 text_document_position: lsp::TextDocumentPositionParams::new(
2471 lsp::TextDocumentIdentifier::new(
2472 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2473 ),
2474 point_to_lsp(position),
2475 ),
2476 context: Default::default(),
2477 work_done_progress_params: Default::default(),
2478 partial_result_params: Default::default(),
2479 })
2480 .await
2481 .context("lsp completion request failed")?;
2482
2483 let completions = if let Some(completions) = completions {
2484 match completions {
2485 lsp::CompletionResponse::Array(completions) => completions,
2486 lsp::CompletionResponse::List(list) => list.items,
2487 }
2488 } else {
2489 Default::default()
2490 };
2491
2492 source_buffer_handle.read_with(&cx, |this, _| {
2493 Ok(completions
2494 .into_iter()
2495 .filter_map(|lsp_completion| {
2496 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2497 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2498 (range_from_lsp(edit.range), edit.new_text.clone())
2499 }
2500 None => {
2501 let clipped_position =
2502 this.clip_point_utf16(position, Bias::Left);
2503 if position != clipped_position {
2504 log::info!("completion out of expected range");
2505 return None;
2506 }
2507 (
2508 this.common_prefix_at(
2509 clipped_position,
2510 &lsp_completion.label,
2511 ),
2512 lsp_completion.label.clone(),
2513 )
2514 }
2515 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2516 log::info!("unsupported insert/replace completion");
2517 return None;
2518 }
2519 };
2520
2521 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2522 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2523 if clipped_start == old_range.start && clipped_end == old_range.end {
2524 Some(Completion {
2525 old_range: this.anchor_before(old_range.start)
2526 ..this.anchor_after(old_range.end),
2527 new_text,
2528 label: language
2529 .as_ref()
2530 .and_then(|l| l.label_for_completion(&lsp_completion))
2531 .unwrap_or_else(|| {
2532 CodeLabel::plain(
2533 lsp_completion.label.clone(),
2534 lsp_completion.filter_text.as_deref(),
2535 )
2536 }),
2537 lsp_completion,
2538 })
2539 } else {
2540 log::info!("completion out of expected range");
2541 None
2542 }
2543 })
2544 .collect())
2545 })
2546 })
2547 } else if let Some(project_id) = self.remote_id() {
2548 let rpc = self.client.clone();
2549 let message = proto::GetCompletions {
2550 project_id,
2551 buffer_id,
2552 position: Some(language::proto::serialize_anchor(&anchor)),
2553 version: serialize_version(&source_buffer.version()),
2554 };
2555 cx.spawn_weak(|_, mut cx| async move {
2556 let response = rpc.request(message).await?;
2557
2558 source_buffer_handle
2559 .update(&mut cx, |buffer, _| {
2560 buffer.wait_for_version(deserialize_version(response.version))
2561 })
2562 .await;
2563
2564 response
2565 .completions
2566 .into_iter()
2567 .map(|completion| {
2568 language::proto::deserialize_completion(completion, language.as_ref())
2569 })
2570 .collect()
2571 })
2572 } else {
2573 Task::ready(Ok(Default::default()))
2574 }
2575 }
2576
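    // Applies the additional text edits that accompany a completion, resolving the
    // completion item through the language server when local, or asking the host via
    // `ApplyCompletionAdditionalEdits` when remote. Returns the applied transaction,
    // if any.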
2577 pub fn apply_additional_edits_for_completion(
2578 &self,
2579 buffer_handle: ModelHandle<Buffer>,
2580 completion: Completion,
2581 push_to_history: bool,
2582 cx: &mut ModelContext<Self>,
2583 ) -> Task<Result<Option<Transaction>>> {
2584 let buffer = buffer_handle.read(cx);
2585 let buffer_id = buffer.remote_id();
2586
2587 if self.is_local() {
2588 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2589 {
2590 server.clone()
2591 } else {
2592 return Task::ready(Ok(Default::default()));
2593 };
2594
2595 cx.spawn(|this, mut cx| async move {
2596 let resolved_completion = lang_server
2597 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2598 .await?;
2599 if let Some(edits) = resolved_completion.additional_text_edits {
2600 let edits = this
2601 .update(&mut cx, |this, cx| {
2602 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2603 })
2604 .await?;
2605 buffer_handle.update(&mut cx, |buffer, cx| {
2606 buffer.finalize_last_transaction();
2607 buffer.start_transaction();
2608 for (range, text) in edits {
2609 buffer.edit([range], text, cx);
2610 }
2611 let transaction = if buffer.end_transaction(cx).is_some() {
2612 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2613 if !push_to_history {
2614 buffer.forget_transaction(transaction.id);
2615 }
2616 Some(transaction)
2617 } else {
2618 None
2619 };
2620 Ok(transaction)
2621 })
2622 } else {
2623 Ok(None)
2624 }
2625 })
2626 } else if let Some(project_id) = self.remote_id() {
2627 let client = self.client.clone();
2628 cx.spawn(|_, mut cx| async move {
2629 let response = client
2630 .request(proto::ApplyCompletionAdditionalEdits {
2631 project_id,
2632 buffer_id,
2633 completion: Some(language::proto::serialize_completion(&completion)),
2634 })
2635 .await?;
2636
2637 if let Some(transaction) = response.transaction {
2638 let transaction = language::proto::deserialize_transaction(transaction)?;
2639 buffer_handle
2640 .update(&mut cx, |buffer, _| {
2641 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2642 })
2643 .await;
2644 if push_to_history {
2645 buffer_handle.update(&mut cx, |buffer, _| {
2646 buffer.push_transaction(transaction.clone(), Instant::now());
2647 });
2648 }
2649 Ok(Some(transaction))
2650 } else {
2651 Ok(None)
2652 }
2653 })
2654 } else {
2655 Task::ready(Err(anyhow!("project does not have a remote id")))
2656 }
2657 }
2658
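    // Requests code actions for the given range, passing the diagnostics that overlap
    // the range along as context. Remote projects forward the request to the host via
    // `GetCodeActions`.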
2659 pub fn code_actions<T: Clone + ToOffset>(
2660 &self,
2661 buffer_handle: &ModelHandle<Buffer>,
2662 range: Range<T>,
2663 cx: &mut ModelContext<Self>,
2664 ) -> Task<Result<Vec<CodeAction>>> {
2665 let buffer_handle = buffer_handle.clone();
2666 let buffer = buffer_handle.read(cx);
2667 let snapshot = buffer.snapshot();
2668 let relevant_diagnostics = snapshot
2669 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2670 .map(|entry| entry.to_lsp_diagnostic_stub())
2671 .collect();
2672 let buffer_id = buffer.remote_id();
2673 let worktree;
2674 let buffer_abs_path;
2675 if let Some(file) = File::from_dyn(buffer.file()) {
2676 worktree = file.worktree.clone();
2677 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2678 } else {
2679 return Task::ready(Ok(Default::default()));
2680 };
2681 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2682
2683 if worktree.read(cx).as_local().is_some() {
2684 let buffer_abs_path = buffer_abs_path.unwrap();
2685 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2686 {
2687 server.clone()
2688 } else {
2689 return Task::ready(Ok(Default::default()));
2690 };
2691
2692 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2693 cx.foreground().spawn(async move {
2694                if lang_server.capabilities().code_action_provider.is_none() {
2695 return Ok(Default::default());
2696 }
2697
2698 Ok(lang_server
2699 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2700 text_document: lsp::TextDocumentIdentifier::new(
2701 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2702 ),
2703 range: lsp_range,
2704 work_done_progress_params: Default::default(),
2705 partial_result_params: Default::default(),
2706 context: lsp::CodeActionContext {
2707 diagnostics: relevant_diagnostics,
2708 only: Some(vec![
2709 lsp::CodeActionKind::QUICKFIX,
2710 lsp::CodeActionKind::REFACTOR,
2711 lsp::CodeActionKind::REFACTOR_EXTRACT,
2712 lsp::CodeActionKind::SOURCE,
2713 ]),
2714 },
2715 })
2716 .await?
2717 .unwrap_or_default()
2718 .into_iter()
2719 .filter_map(|entry| {
2720 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2721 Some(CodeAction {
2722 range: range.clone(),
2723 lsp_action,
2724 })
2725 } else {
2726 None
2727 }
2728 })
2729 .collect())
2730 })
2731 } else if let Some(project_id) = self.remote_id() {
2732 let rpc = self.client.clone();
2733 let version = buffer.version();
2734 cx.spawn_weak(|_, mut cx| async move {
2735 let response = rpc
2736 .request(proto::GetCodeActions {
2737 project_id,
2738 buffer_id,
2739 start: Some(language::proto::serialize_anchor(&range.start)),
2740 end: Some(language::proto::serialize_anchor(&range.end)),
2741 version: serialize_version(&version),
2742 })
2743 .await?;
2744
2745 buffer_handle
2746 .update(&mut cx, |buffer, _| {
2747 buffer.wait_for_version(deserialize_version(response.version))
2748 })
2749 .await;
2750
2751 response
2752 .actions
2753 .into_iter()
2754 .map(language::proto::deserialize_code_action)
2755 .collect()
2756 })
2757 } else {
2758 Task::ready(Ok(Default::default()))
2759 }
2760 }
2761
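    // Applies a code action: when the action carries resolve data its range is
    // refreshed and it is resolved via `codeAction/resolve`, otherwise the actions
    // are re-requested and matched by title; then its workspace edit is applied or
    // its command executed. Remote projects delegate to the host via `ApplyCodeAction`.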
2762 pub fn apply_code_action(
2763 &self,
2764 buffer_handle: ModelHandle<Buffer>,
2765 mut action: CodeAction,
2766 push_to_history: bool,
2767 cx: &mut ModelContext<Self>,
2768 ) -> Task<Result<ProjectTransaction>> {
2769 if self.is_local() {
2770 let buffer = buffer_handle.read(cx);
2771 let (lsp_adapter, lang_server) =
2772 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2773 server.clone()
2774 } else {
2775 return Task::ready(Ok(Default::default()));
2776 };
2777 let range = action.range.to_point_utf16(buffer);
2778
2779 cx.spawn(|this, mut cx| async move {
2780 if let Some(lsp_range) = action
2781 .lsp_action
2782 .data
2783 .as_mut()
2784 .and_then(|d| d.get_mut("codeActionParams"))
2785 .and_then(|d| d.get_mut("range"))
2786 {
2787 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2788 action.lsp_action = lang_server
2789 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2790 .await?;
2791 } else {
2792 let actions = this
2793 .update(&mut cx, |this, cx| {
2794 this.code_actions(&buffer_handle, action.range, cx)
2795 })
2796 .await?;
2797 action.lsp_action = actions
2798 .into_iter()
2799 .find(|a| a.lsp_action.title == action.lsp_action.title)
2800 .ok_or_else(|| anyhow!("code action is outdated"))?
2801 .lsp_action;
2802 }
2803
2804 if let Some(edit) = action.lsp_action.edit {
2805 Self::deserialize_workspace_edit(
2806 this,
2807 edit,
2808 push_to_history,
2809 lsp_adapter,
2810 lang_server,
2811 &mut cx,
2812 )
2813 .await
2814 } else if let Some(command) = action.lsp_action.command {
2815 this.update(&mut cx, |this, _| {
2816 this.last_workspace_edits_by_language_server
2817 .remove(&lang_server.server_id());
2818 });
2819 lang_server
2820 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2821 command: command.command,
2822 arguments: command.arguments.unwrap_or_default(),
2823 ..Default::default()
2824 })
2825 .await?;
2826 Ok(this.update(&mut cx, |this, _| {
2827 this.last_workspace_edits_by_language_server
2828 .remove(&lang_server.server_id())
2829 .unwrap_or_default()
2830 }))
2831 } else {
2832 Ok(ProjectTransaction::default())
2833 }
2834 })
2835 } else if let Some(project_id) = self.remote_id() {
2836 let client = self.client.clone();
2837 let request = proto::ApplyCodeAction {
2838 project_id,
2839 buffer_id: buffer_handle.read(cx).remote_id(),
2840 action: Some(language::proto::serialize_code_action(&action)),
2841 };
2842 cx.spawn(|this, mut cx| async move {
2843 let response = client
2844 .request(request)
2845 .await?
2846 .transaction
2847 .ok_or_else(|| anyhow!("missing transaction"))?;
2848 this.update(&mut cx, |this, cx| {
2849 this.deserialize_project_transaction(response, push_to_history, cx)
2850 })
2851 .await
2852 })
2853 } else {
2854 Task::ready(Err(anyhow!("project does not have a remote id")))
2855 }
2856 }
2857
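    // Applies an `lsp::WorkspaceEdit` to the project: resource operations (create,
    // rename, delete) are performed through the filesystem, text edits are applied to
    // the affected buffers, and the per-buffer transactions are collected into a
    // `ProjectTransaction`.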
2858 async fn deserialize_workspace_edit(
2859 this: ModelHandle<Self>,
2860 edit: lsp::WorkspaceEdit,
2861 push_to_history: bool,
2862 lsp_adapter: Arc<dyn LspAdapter>,
2863 language_server: Arc<LanguageServer>,
2864 cx: &mut AsyncAppContext,
2865 ) -> Result<ProjectTransaction> {
2866 let fs = this.read_with(cx, |this, _| this.fs.clone());
2867 let mut operations = Vec::new();
2868 if let Some(document_changes) = edit.document_changes {
2869 match document_changes {
2870 lsp::DocumentChanges::Edits(edits) => {
2871 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2872 }
2873 lsp::DocumentChanges::Operations(ops) => operations = ops,
2874 }
2875 } else if let Some(changes) = edit.changes {
2876 operations.extend(changes.into_iter().map(|(uri, edits)| {
2877 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2878 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2879 uri,
2880 version: None,
2881 },
2882 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2883 })
2884 }));
2885 }
2886
2887 let mut project_transaction = ProjectTransaction::default();
2888 for operation in operations {
2889 match operation {
2890 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2891 let abs_path = op
2892 .uri
2893 .to_file_path()
2894 .map_err(|_| anyhow!("can't convert URI to path"))?;
2895
2896 if let Some(parent_path) = abs_path.parent() {
2897 fs.create_dir(parent_path).await?;
2898 }
2899 if abs_path.ends_with("/") {
2900 fs.create_dir(&abs_path).await?;
2901 } else {
2902 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2903 .await?;
2904 }
2905 }
2906 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2907 let source_abs_path = op
2908 .old_uri
2909 .to_file_path()
2910 .map_err(|_| anyhow!("can't convert URI to path"))?;
2911 let target_abs_path = op
2912 .new_uri
2913 .to_file_path()
2914 .map_err(|_| anyhow!("can't convert URI to path"))?;
2915 fs.rename(
2916 &source_abs_path,
2917 &target_abs_path,
2918 op.options.map(Into::into).unwrap_or_default(),
2919 )
2920 .await?;
2921 }
2922 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2923 let abs_path = op
2924 .uri
2925 .to_file_path()
2926 .map_err(|_| anyhow!("can't convert URI to path"))?;
2927 let options = op.options.map(Into::into).unwrap_or_default();
2928 if abs_path.ends_with("/") {
2929 fs.remove_dir(&abs_path, options).await?;
2930 } else {
2931 fs.remove_file(&abs_path, options).await?;
2932 }
2933 }
2934 lsp::DocumentChangeOperation::Edit(op) => {
2935 let buffer_to_edit = this
2936 .update(cx, |this, cx| {
2937 this.open_local_buffer_via_lsp(
2938 op.text_document.uri,
2939 lsp_adapter.clone(),
2940 language_server.clone(),
2941 cx,
2942 )
2943 })
2944 .await?;
2945
2946 let edits = this
2947 .update(cx, |this, cx| {
2948 let edits = op.edits.into_iter().map(|edit| match edit {
2949 lsp::OneOf::Left(edit) => edit,
2950 lsp::OneOf::Right(edit) => edit.text_edit,
2951 });
2952 this.edits_from_lsp(
2953 &buffer_to_edit,
2954 edits,
2955 op.text_document.version,
2956 cx,
2957 )
2958 })
2959 .await?;
2960
2961 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2962 buffer.finalize_last_transaction();
2963 buffer.start_transaction();
2964 for (range, text) in edits {
2965 buffer.edit([range], text, cx);
2966 }
2967 let transaction = if buffer.end_transaction(cx).is_some() {
2968 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2969 if !push_to_history {
2970 buffer.forget_transaction(transaction.id);
2971 }
2972 Some(transaction)
2973 } else {
2974 None
2975 };
2976
2977 transaction
2978 });
2979 if let Some(transaction) = transaction {
2980 project_transaction.0.insert(buffer_to_edit, transaction);
2981 }
2982 }
2983 }
2984 }
2985
2986 Ok(project_transaction)
2987 }
2988
2989 pub fn prepare_rename<T: ToPointUtf16>(
2990 &self,
2991 buffer: ModelHandle<Buffer>,
2992 position: T,
2993 cx: &mut ModelContext<Self>,
2994 ) -> Task<Result<Option<Range<Anchor>>>> {
2995 let position = position.to_point_utf16(buffer.read(cx));
2996 self.request_lsp(buffer, PrepareRename { position }, cx)
2997 }
2998
2999 pub fn perform_rename<T: ToPointUtf16>(
3000 &self,
3001 buffer: ModelHandle<Buffer>,
3002 position: T,
3003 new_name: String,
3004 push_to_history: bool,
3005 cx: &mut ModelContext<Self>,
3006 ) -> Task<Result<ProjectTransaction>> {
3007 let position = position.to_point_utf16(buffer.read(cx));
3008 self.request_lsp(
3009 buffer,
3010 PerformRename {
3011 position,
3012 new_name,
3013 push_to_history,
3014 },
3015 cx,
3016 )
3017 }
3018
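    // Searches the project for the given query. Locally this runs as a pipeline:
    // background workers scan the visible files for paths whose contents may match,
    // matching paths are opened as buffers, and those buffers (plus any already open)
    // are searched to produce anchored ranges. Remote projects forward the query to
    // the host.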
3019 pub fn search(
3020 &self,
3021 query: SearchQuery,
3022 cx: &mut ModelContext<Self>,
3023 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3024 if self.is_local() {
3025 let snapshots = self
3026 .visible_worktrees(cx)
3027 .filter_map(|tree| {
3028 let tree = tree.read(cx).as_local()?;
3029 Some(tree.snapshot())
3030 })
3031 .collect::<Vec<_>>();
3032
3033 let background = cx.background().clone();
3034 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3035 if path_count == 0 {
3036 return Task::ready(Ok(Default::default()));
3037 }
3038 let workers = background.num_cpus().min(path_count);
3039 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
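            // First stage: scan the visible files on background workers, sending each
            // path whose contents may match the query down `matching_paths_tx`.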
3040 cx.background()
3041 .spawn({
3042 let fs = self.fs.clone();
3043 let background = cx.background().clone();
3044 let query = query.clone();
3045 async move {
3046 let fs = &fs;
3047 let query = &query;
3048 let matching_paths_tx = &matching_paths_tx;
3049 let paths_per_worker = (path_count + workers - 1) / workers;
3050 let snapshots = &snapshots;
3051 background
3052 .scoped(|scope| {
3053 for worker_ix in 0..workers {
3054 let worker_start_ix = worker_ix * paths_per_worker;
3055 let worker_end_ix = worker_start_ix + paths_per_worker;
3056 scope.spawn(async move {
3057 let mut snapshot_start_ix = 0;
3058 let mut abs_path = PathBuf::new();
3059 for snapshot in snapshots {
3060 let snapshot_end_ix =
3061 snapshot_start_ix + snapshot.visible_file_count();
3062 if worker_end_ix <= snapshot_start_ix {
3063 break;
3064 } else if worker_start_ix > snapshot_end_ix {
3065 snapshot_start_ix = snapshot_end_ix;
3066 continue;
3067 } else {
3068 let start_in_snapshot = worker_start_ix
3069 .saturating_sub(snapshot_start_ix);
3070 let end_in_snapshot =
3071 cmp::min(worker_end_ix, snapshot_end_ix)
3072 - snapshot_start_ix;
3073
3074 for entry in snapshot
3075 .files(false, start_in_snapshot)
3076 .take(end_in_snapshot - start_in_snapshot)
3077 {
3078 if matching_paths_tx.is_closed() {
3079 break;
3080 }
3081
3082 abs_path.clear();
3083 abs_path.push(&snapshot.abs_path());
3084 abs_path.push(&entry.path);
3085 let matches = if let Some(file) =
3086 fs.open_sync(&abs_path).await.log_err()
3087 {
3088 query.detect(file).unwrap_or(false)
3089 } else {
3090 false
3091 };
3092
3093 if matches {
3094 let project_path =
3095 (snapshot.id(), entry.path.clone());
3096 if matching_paths_tx
3097 .send(project_path)
3098 .await
3099 .is_err()
3100 {
3101 break;
3102 }
3103 }
3104 }
3105
3106 snapshot_start_ix = snapshot_end_ix;
3107 }
3108 }
3109 });
3110 }
3111 })
3112 .await;
3113 }
3114 })
3115 .detach();
3116
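            // Second stage: feed the already-open buffers into the search, then open a
            // buffer for each matching path as it arrives.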
3117 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3118 let open_buffers = self
3119 .opened_buffers
3120 .values()
3121 .filter_map(|b| b.upgrade(cx))
3122 .collect::<HashSet<_>>();
3123 cx.spawn(|this, cx| async move {
3124 for buffer in &open_buffers {
3125 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3126 buffers_tx.send((buffer.clone(), snapshot)).await?;
3127 }
3128
3129 let open_buffers = Rc::new(RefCell::new(open_buffers));
3130 while let Some(project_path) = matching_paths_rx.next().await {
3131 if buffers_tx.is_closed() {
3132 break;
3133 }
3134
3135 let this = this.clone();
3136 let open_buffers = open_buffers.clone();
3137 let buffers_tx = buffers_tx.clone();
3138 cx.spawn(|mut cx| async move {
3139 if let Some(buffer) = this
3140 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3141 .await
3142 .log_err()
3143 {
3144 if open_buffers.borrow_mut().insert(buffer.clone()) {
3145 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3146 buffers_tx.send((buffer, snapshot)).await?;
3147 }
3148 }
3149
3150 Ok::<_, anyhow::Error>(())
3151 })
3152 .detach();
3153 }
3154
3155 Ok::<_, anyhow::Error>(())
3156 })
3157 .detach_and_log_err(cx);
3158
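            // Final stage: search the contents of each buffer on background workers and
            // collect the anchored match ranges per buffer.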
3159 let background = cx.background().clone();
3160 cx.background().spawn(async move {
3161 let query = &query;
3162 let mut matched_buffers = Vec::new();
3163 for _ in 0..workers {
3164 matched_buffers.push(HashMap::default());
3165 }
3166 background
3167 .scoped(|scope| {
3168 for worker_matched_buffers in matched_buffers.iter_mut() {
3169 let mut buffers_rx = buffers_rx.clone();
3170 scope.spawn(async move {
3171 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3172 let buffer_matches = query
3173 .search(snapshot.as_rope())
3174 .await
3175 .iter()
3176 .map(|range| {
3177 snapshot.anchor_before(range.start)
3178 ..snapshot.anchor_after(range.end)
3179 })
3180 .collect::<Vec<_>>();
3181 if !buffer_matches.is_empty() {
3182 worker_matched_buffers
3183 .insert(buffer.clone(), buffer_matches);
3184 }
3185 }
3186 });
3187 }
3188 })
3189 .await;
3190 Ok(matched_buffers.into_iter().flatten().collect())
3191 })
3192 } else if let Some(project_id) = self.remote_id() {
3193 let request = self.client.request(query.to_proto(project_id));
3194 cx.spawn(|this, mut cx| async move {
3195 let response = request.await?;
3196 let mut result = HashMap::default();
3197 for location in response.locations {
3198 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3199 let target_buffer = this
3200 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3201 .await?;
3202 let start = location
3203 .start
3204 .and_then(deserialize_anchor)
3205 .ok_or_else(|| anyhow!("missing target start"))?;
3206 let end = location
3207 .end
3208 .and_then(deserialize_anchor)
3209 .ok_or_else(|| anyhow!("missing target end"))?;
3210 result
3211 .entry(target_buffer)
3212 .or_insert(Vec::new())
3213 .push(start..end)
3214 }
3215 Ok(result)
3216 })
3217 } else {
3218 Task::ready(Ok(Default::default()))
3219 }
3220 }
3221
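    // Dispatches an `LspCommand` either to the buffer's local language server or, for
    // remote projects, to the host over RPC, converting the response back into the
    // command's result type. Yields a default response when no server is available or
    // the server lacks the required capability.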
3222 fn request_lsp<R: LspCommand>(
3223 &self,
3224 buffer_handle: ModelHandle<Buffer>,
3225 request: R,
3226 cx: &mut ModelContext<Self>,
3227 ) -> Task<Result<R::Response>>
3228 where
3229 <R::LspRequest as lsp::request::Request>::Result: Send,
3230 {
3231 let buffer = buffer_handle.read(cx);
3232 if self.is_local() {
3233 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3234 if let Some((file, (_, language_server))) =
3235 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3236 {
3237 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3238 return cx.spawn(|this, cx| async move {
3239 if !request.check_capabilities(&language_server.capabilities()) {
3240 return Ok(Default::default());
3241 }
3242
3243 let response = language_server
3244 .request::<R::LspRequest>(lsp_params)
3245 .await
3246 .context("lsp request failed")?;
3247 request
3248 .response_from_lsp(response, this, buffer_handle, cx)
3249 .await
3250 });
3251 }
3252 } else if let Some(project_id) = self.remote_id() {
3253 let rpc = self.client.clone();
3254 let message = request.to_proto(project_id, buffer);
3255 return cx.spawn(|this, cx| async move {
3256 let response = rpc.request(message).await?;
3257 request
3258 .response_from_proto(response, this, buffer_handle, cx)
3259 .await
3260 });
3261 }
3262 Task::ready(Ok(Default::default()))
3263 }
3264
3265 pub fn find_or_create_local_worktree(
3266 &mut self,
3267 abs_path: impl AsRef<Path>,
3268 visible: bool,
3269 cx: &mut ModelContext<Self>,
3270 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3271 let abs_path = abs_path.as_ref();
3272 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3273 Task::ready(Ok((tree.clone(), relative_path.into())))
3274 } else {
3275 let worktree = self.create_local_worktree(abs_path, visible, cx);
3276 cx.foreground()
3277 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3278 }
3279 }
3280
3281 pub fn find_local_worktree(
3282 &self,
3283 abs_path: &Path,
3284 cx: &AppContext,
3285 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3286 for tree in self.worktrees(cx) {
3287 if let Some(relative_path) = tree
3288 .read(cx)
3289 .as_local()
3290 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3291 {
3292 return Some((tree.clone(), relative_path.into()));
3293 }
3294 }
3295 None
3296 }
3297
3298 pub fn is_shared(&self) -> bool {
3299 match &self.client_state {
3300 ProjectClientState::Local { is_shared, .. } => *is_shared,
3301 ProjectClientState::Remote { .. } => false,
3302 }
3303 }
3304
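    // Creates a local worktree for the given path, deduplicating concurrent loads of
    // the same path through `loading_local_worktrees`. Once created, the worktree is
    // registered with (or shared to) the remote project if this project has a remote
    // id.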
3305 fn create_local_worktree(
3306 &mut self,
3307 abs_path: impl AsRef<Path>,
3308 visible: bool,
3309 cx: &mut ModelContext<Self>,
3310 ) -> Task<Result<ModelHandle<Worktree>>> {
3311 let fs = self.fs.clone();
3312 let client = self.client.clone();
3313 let next_entry_id = self.next_entry_id.clone();
3314 let path: Arc<Path> = abs_path.as_ref().into();
3315 let task = self
3316 .loading_local_worktrees
3317 .entry(path.clone())
3318 .or_insert_with(|| {
3319 cx.spawn(|project, mut cx| {
3320 async move {
3321 let worktree = Worktree::local(
3322 client.clone(),
3323 path.clone(),
3324 visible,
3325 fs,
3326 next_entry_id,
3327 &mut cx,
3328 )
3329 .await;
3330 project.update(&mut cx, |project, _| {
3331 project.loading_local_worktrees.remove(&path);
3332 });
3333 let worktree = worktree?;
3334
3335 let (remote_project_id, is_shared) =
3336 project.update(&mut cx, |project, cx| {
3337 project.add_worktree(&worktree, cx);
3338 (project.remote_id(), project.is_shared())
3339 });
3340
3341 if let Some(project_id) = remote_project_id {
3342 if is_shared {
3343 worktree
3344 .update(&mut cx, |worktree, cx| {
3345 worktree.as_local_mut().unwrap().share(project_id, cx)
3346 })
3347 .await?;
3348 } else {
3349 worktree
3350 .update(&mut cx, |worktree, cx| {
3351 worktree.as_local_mut().unwrap().register(project_id, cx)
3352 })
3353 .await?;
3354 }
3355 }
3356
3357 Ok(worktree)
3358 }
3359                    .map_err(Arc::new)
3360 })
3361 .shared()
3362 })
3363 .clone();
3364 cx.foreground().spawn(async move {
3365 match task.await {
3366 Ok(worktree) => Ok(worktree),
3367 Err(err) => Err(anyhow!("{}", err)),
3368 }
3369 })
3370 }
3371
3372 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3373 self.worktrees.retain(|worktree| {
3374 worktree
3375 .upgrade(cx)
3376 .map_or(false, |w| w.read(cx).id() != id)
3377 });
3378 cx.notify();
3379 }
3380
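    // Adds a worktree to the project, subscribing to local worktree changes so that
    // open buffers stay up to date. The handle is held strongly when the project is
    // shared or the worktree is visible or remote, and weakly otherwise.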
3381 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3382 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3383 if worktree.read(cx).is_local() {
3384 cx.subscribe(&worktree, |this, worktree, _, cx| {
3385 this.update_local_worktree_buffers(worktree, cx);
3386 })
3387 .detach();
3388 }
3389
3390 let push_strong_handle = {
3391 let worktree = worktree.read(cx);
3392 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3393 };
3394 if push_strong_handle {
3395 self.worktrees
3396 .push(WorktreeHandle::Strong(worktree.clone()));
3397 } else {
3398 cx.observe_release(&worktree, |this, _, cx| {
3399 this.worktrees
3400 .retain(|worktree| worktree.upgrade(cx).is_some());
3401 cx.notify();
3402 })
3403 .detach();
3404 self.worktrees
3405 .push(WorktreeHandle::Weak(worktree.downgrade()));
3406 }
3407 cx.notify();
3408 }
3409
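    // Called when a local worktree's snapshot changes: refreshes the `File` attached
    // to each open buffer, notifies the remote project of the new file metadata, drops
    // entries for released buffers, and re-registers renamed buffers with their
    // language servers.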
3410 fn update_local_worktree_buffers(
3411 &mut self,
3412 worktree_handle: ModelHandle<Worktree>,
3413 cx: &mut ModelContext<Self>,
3414 ) {
3415 let snapshot = worktree_handle.read(cx).snapshot();
3416 let mut buffers_to_delete = Vec::new();
3417 let mut renamed_buffers = Vec::new();
3418 for (buffer_id, buffer) in &self.opened_buffers {
3419 if let Some(buffer) = buffer.upgrade(cx) {
3420 buffer.update(cx, |buffer, cx| {
3421 if let Some(old_file) = File::from_dyn(buffer.file()) {
3422 if old_file.worktree != worktree_handle {
3423 return;
3424 }
3425
3426 let new_file = if let Some(entry) = old_file
3427 .entry_id
3428 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3429 {
3430 File {
3431 is_local: true,
3432 entry_id: Some(entry.id),
3433 mtime: entry.mtime,
3434 path: entry.path.clone(),
3435 worktree: worktree_handle.clone(),
3436 }
3437 } else if let Some(entry) =
3438 snapshot.entry_for_path(old_file.path().as_ref())
3439 {
3440 File {
3441 is_local: true,
3442 entry_id: Some(entry.id),
3443 mtime: entry.mtime,
3444 path: entry.path.clone(),
3445 worktree: worktree_handle.clone(),
3446 }
3447 } else {
3448 File {
3449 is_local: true,
3450 entry_id: None,
3451 path: old_file.path().clone(),
3452 mtime: old_file.mtime(),
3453 worktree: worktree_handle.clone(),
3454 }
3455 };
3456
3457 let old_path = old_file.abs_path(cx);
3458 if new_file.abs_path(cx) != old_path {
3459 renamed_buffers.push((cx.handle(), old_path));
3460 }
3461
3462 if let Some(project_id) = self.remote_id() {
3463 self.client
3464 .send(proto::UpdateBufferFile {
3465 project_id,
3466 buffer_id: *buffer_id as u64,
3467 file: Some(new_file.to_proto()),
3468 })
3469 .log_err();
3470 }
3471 buffer.file_updated(Box::new(new_file), cx).detach();
3472 }
3473 });
3474 } else {
3475 buffers_to_delete.push(*buffer_id);
3476 }
3477 }
3478
3479 for buffer_id in buffers_to_delete {
3480 self.opened_buffers.remove(&buffer_id);
3481 }
3482
3483 for (buffer, old_path) in renamed_buffers {
3484 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3485 self.assign_language_to_buffer(&buffer, cx);
3486 self.register_buffer_with_language_server(&buffer, cx);
3487 }
3488 }
3489
3490 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3491 let new_active_entry = entry.and_then(|project_path| {
3492 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3493 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3494 Some(entry.id)
3495 });
3496 if new_active_entry != self.active_entry {
3497 self.active_entry = new_active_entry;
3498 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3499 }
3500 }
3501
3502 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3503 self.language_servers_with_diagnostics_running > 0
3504 }
3505
3506 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3507 let mut summary = DiagnosticSummary::default();
3508 for (_, path_summary) in self.diagnostic_summaries(cx) {
3509 summary.error_count += path_summary.error_count;
3510 summary.warning_count += path_summary.warning_count;
3511 summary.info_count += path_summary.info_count;
3512 summary.hint_count += path_summary.hint_count;
3513 }
3514 summary
3515 }
3516
3517 pub fn diagnostic_summaries<'a>(
3518 &'a self,
3519 cx: &'a AppContext,
3520 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3521 self.worktrees(cx).flat_map(move |worktree| {
3522 let worktree = worktree.read(cx);
3523 let worktree_id = worktree.id();
3524 worktree
3525 .diagnostic_summaries()
3526 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3527 })
3528 }
3529
3530 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3531 self.language_servers_with_diagnostics_running += 1;
3532 if self.language_servers_with_diagnostics_running == 1 {
3533 cx.emit(Event::DiskBasedDiagnosticsStarted);
3534 }
3535 }
3536
3537 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3538 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3539 self.language_servers_with_diagnostics_running -= 1;
3540 if self.language_servers_with_diagnostics_running == 0 {
3541 cx.emit(Event::DiskBasedDiagnosticsFinished);
3542 }
3543 }
3544
3545 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3546 self.active_entry
3547 }
3548
3549 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3550 self.worktree_for_id(path.worktree_id, cx)?
3551 .read(cx)
3552 .entry_for_path(&path.path)
3553 .map(|entry| entry.id)
3554 }
3555
3556 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3557 let worktree = self.worktree_for_entry(entry_id, cx)?;
3558 let worktree = worktree.read(cx);
3559 let worktree_id = worktree.id();
3560 let path = worktree.entry_for_id(entry_id)?.path.clone();
3561 Some(ProjectPath { worktree_id, path })
3562 }
3563
3564 // RPC message handlers
3565
3566 async fn handle_unshare_project(
3567 this: ModelHandle<Self>,
3568 _: TypedEnvelope<proto::UnshareProject>,
3569 _: Arc<Client>,
3570 mut cx: AsyncAppContext,
3571 ) -> Result<()> {
3572 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3573 Ok(())
3574 }
3575
3576 async fn handle_add_collaborator(
3577 this: ModelHandle<Self>,
3578 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3579 _: Arc<Client>,
3580 mut cx: AsyncAppContext,
3581 ) -> Result<()> {
3582 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3583 let collaborator = envelope
3584 .payload
3585 .collaborator
3586 .take()
3587 .ok_or_else(|| anyhow!("empty collaborator"))?;
3588
3589 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3590 this.update(&mut cx, |this, cx| {
3591 this.collaborators
3592 .insert(collaborator.peer_id, collaborator);
3593 cx.notify();
3594 });
3595
3596 Ok(())
3597 }
3598
3599 async fn handle_remove_collaborator(
3600 this: ModelHandle<Self>,
3601 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3602 _: Arc<Client>,
3603 mut cx: AsyncAppContext,
3604 ) -> Result<()> {
3605 this.update(&mut cx, |this, cx| {
3606 let peer_id = PeerId(envelope.payload.peer_id);
3607 let replica_id = this
3608 .collaborators
3609 .remove(&peer_id)
3610 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3611 .replica_id;
3612 for (_, buffer) in &this.opened_buffers {
3613 if let Some(buffer) = buffer.upgrade(cx) {
3614 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3615 }
3616 }
3617 cx.emit(Event::CollaboratorLeft(peer_id));
3618 cx.notify();
3619 Ok(())
3620 })
3621 }
3622
3623 async fn handle_register_worktree(
3624 this: ModelHandle<Self>,
3625 envelope: TypedEnvelope<proto::RegisterWorktree>,
3626 client: Arc<Client>,
3627 mut cx: AsyncAppContext,
3628 ) -> Result<()> {
3629 this.update(&mut cx, |this, cx| {
3630 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3631 let replica_id = this.replica_id();
3632 let worktree = proto::Worktree {
3633 id: envelope.payload.worktree_id,
3634 root_name: envelope.payload.root_name,
3635 entries: Default::default(),
3636 diagnostic_summaries: Default::default(),
3637 visible: envelope.payload.visible,
3638 };
3639 let (worktree, load_task) =
3640 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3641 this.add_worktree(&worktree, cx);
3642 load_task.detach();
3643 Ok(())
3644 })
3645 }
3646
3647 async fn handle_unregister_worktree(
3648 this: ModelHandle<Self>,
3649 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3650 _: Arc<Client>,
3651 mut cx: AsyncAppContext,
3652 ) -> Result<()> {
3653 this.update(&mut cx, |this, cx| {
3654 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3655 this.remove_worktree(worktree_id, cx);
3656 Ok(())
3657 })
3658 }
3659
3660 async fn handle_update_worktree(
3661 this: ModelHandle<Self>,
3662 envelope: TypedEnvelope<proto::UpdateWorktree>,
3663 _: Arc<Client>,
3664 mut cx: AsyncAppContext,
3665 ) -> Result<()> {
3666 this.update(&mut cx, |this, cx| {
3667 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3668 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3669 worktree.update(cx, |worktree, _| {
3670 let worktree = worktree.as_remote_mut().unwrap();
3671 worktree.update_from_remote(envelope)
3672 })?;
3673 }
3674 Ok(())
3675 })
3676 }
3677
3678 async fn handle_update_diagnostic_summary(
3679 this: ModelHandle<Self>,
3680 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3681 _: Arc<Client>,
3682 mut cx: AsyncAppContext,
3683 ) -> Result<()> {
3684 this.update(&mut cx, |this, cx| {
3685 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3686 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3687 if let Some(summary) = envelope.payload.summary {
3688 let project_path = ProjectPath {
3689 worktree_id,
3690 path: Path::new(&summary.path).into(),
3691 };
3692 worktree.update(cx, |worktree, _| {
3693 worktree
3694 .as_remote_mut()
3695 .unwrap()
3696 .update_diagnostic_summary(project_path.path.clone(), &summary);
3697 });
3698 cx.emit(Event::DiagnosticsUpdated(project_path));
3699 }
3700 }
3701 Ok(())
3702 })
3703 }
3704
3705 async fn handle_start_language_server(
3706 this: ModelHandle<Self>,
3707 envelope: TypedEnvelope<proto::StartLanguageServer>,
3708 _: Arc<Client>,
3709 mut cx: AsyncAppContext,
3710 ) -> Result<()> {
3711 let server = envelope
3712 .payload
3713 .server
3714 .ok_or_else(|| anyhow!("invalid server"))?;
3715 this.update(&mut cx, |this, cx| {
3716 this.language_server_statuses.insert(
3717 server.id as usize,
3718 LanguageServerStatus {
3719 name: server.name,
3720 pending_work: Default::default(),
3721 pending_diagnostic_updates: 0,
3722 },
3723 );
3724 cx.notify();
3725 });
3726 Ok(())
3727 }
3728
3729 async fn handle_update_language_server(
3730 this: ModelHandle<Self>,
3731 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3732 _: Arc<Client>,
3733 mut cx: AsyncAppContext,
3734 ) -> Result<()> {
3735 let language_server_id = envelope.payload.language_server_id as usize;
3736 match envelope
3737 .payload
3738 .variant
3739 .ok_or_else(|| anyhow!("invalid variant"))?
3740 {
3741 proto::update_language_server::Variant::WorkStart(payload) => {
3742 this.update(&mut cx, |this, cx| {
3743 this.on_lsp_work_start(language_server_id, payload.token, cx);
3744 })
3745 }
3746 proto::update_language_server::Variant::WorkProgress(payload) => {
3747 this.update(&mut cx, |this, cx| {
3748 this.on_lsp_work_progress(
3749 language_server_id,
3750 payload.token,
3751 LanguageServerProgress {
3752 message: payload.message,
3753 percentage: payload.percentage.map(|p| p as usize),
3754 last_update_at: Instant::now(),
3755 },
3756 cx,
3757 );
3758 })
3759 }
3760 proto::update_language_server::Variant::WorkEnd(payload) => {
3761 this.update(&mut cx, |this, cx| {
3762 this.on_lsp_work_end(language_server_id, payload.token, cx);
3763 })
3764 }
3765 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3766 this.update(&mut cx, |this, cx| {
3767 this.disk_based_diagnostics_started(cx);
3768 })
3769 }
3770 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3771 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3772 }
3773 }
3774
3775 Ok(())
3776 }
3777
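    // Applies buffer operations received from a peer. If the buffer is still loading,
    // the operations are queued on its `OpenBuffer::Loading` entry; if the buffer has
    // not been opened at all, a `Loading` entry is created to hold them.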
3778 async fn handle_update_buffer(
3779 this: ModelHandle<Self>,
3780 envelope: TypedEnvelope<proto::UpdateBuffer>,
3781 _: Arc<Client>,
3782 mut cx: AsyncAppContext,
3783 ) -> Result<()> {
3784 this.update(&mut cx, |this, cx| {
3785 let payload = envelope.payload.clone();
3786 let buffer_id = payload.buffer_id;
3787 let ops = payload
3788 .operations
3789 .into_iter()
3790                .map(language::proto::deserialize_operation)
3791 .collect::<Result<Vec<_>, _>>()?;
3792 match this.opened_buffers.entry(buffer_id) {
3793 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3794 OpenBuffer::Strong(buffer) => {
3795 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3796 }
3797 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3798 OpenBuffer::Weak(_) => {}
3799 },
3800 hash_map::Entry::Vacant(e) => {
3801 e.insert(OpenBuffer::Loading(ops));
3802 }
3803 }
3804 Ok(())
3805 })
3806 }
3807
3808 async fn handle_update_buffer_file(
3809 this: ModelHandle<Self>,
3810 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3811 _: Arc<Client>,
3812 mut cx: AsyncAppContext,
3813 ) -> Result<()> {
3814 this.update(&mut cx, |this, cx| {
3815 let payload = envelope.payload.clone();
3816 let buffer_id = payload.buffer_id;
3817 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3818 let worktree = this
3819 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3820 .ok_or_else(|| anyhow!("no such worktree"))?;
3821 let file = File::from_proto(file, worktree.clone(), cx)?;
3822 let buffer = this
3823 .opened_buffers
3824 .get_mut(&buffer_id)
3825 .and_then(|b| b.upgrade(cx))
3826 .ok_or_else(|| anyhow!("no such buffer"))?;
3827 buffer.update(cx, |buffer, cx| {
3828 buffer.file_updated(Box::new(file), cx).detach();
3829 });
3830 Ok(())
3831 })
3832 }
3833
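    // Saves a buffer on behalf of a remote peer, first waiting until the
    // buffer has caught up to the version the peer requested.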
3834 async fn handle_save_buffer(
3835 this: ModelHandle<Self>,
3836 envelope: TypedEnvelope<proto::SaveBuffer>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<proto::BufferSaved> {
3840 let buffer_id = envelope.payload.buffer_id;
3841 let requested_version = deserialize_version(envelope.payload.version);
3842
3843 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3844 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3845 let buffer = this
3846 .opened_buffers
3847 .get(&buffer_id)
3848 .map(|buffer| buffer.upgrade(cx).unwrap())
3849 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3850 Ok::<_, anyhow::Error>((project_id, buffer))
3851 })?;
3852 buffer
3853 .update(&mut cx, |buffer, _| {
3854 buffer.wait_for_version(requested_version)
3855 })
3856 .await;
3857
3858 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3859 Ok(proto::BufferSaved {
3860 project_id,
3861 buffer_id,
3862 version: serialize_version(&saved_version),
3863 mtime: Some(mtime.into()),
3864 })
3865 }
3866
3867 async fn handle_reload_buffers(
3868 this: ModelHandle<Self>,
3869 envelope: TypedEnvelope<proto::ReloadBuffers>,
3870 _: Arc<Client>,
3871 mut cx: AsyncAppContext,
3872 ) -> Result<proto::ReloadBuffersResponse> {
3873 let sender_id = envelope.original_sender_id()?;
3874 let reload = this.update(&mut cx, |this, cx| {
3875 let mut buffers = HashSet::default();
3876 for buffer_id in &envelope.payload.buffer_ids {
3877 buffers.insert(
3878 this.opened_buffers
3879 .get(buffer_id)
3880 .map(|buffer| buffer.upgrade(cx).unwrap())
3881 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3882 );
3883 }
3884 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3885 })?;
3886
3887 let project_transaction = reload.await?;
3888 let project_transaction = this.update(&mut cx, |this, cx| {
3889 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3890 });
3891 Ok(proto::ReloadBuffersResponse {
3892 transaction: Some(project_transaction),
3893 })
3894 }
3895
3896 async fn handle_format_buffers(
3897 this: ModelHandle<Self>,
3898 envelope: TypedEnvelope<proto::FormatBuffers>,
3899 _: Arc<Client>,
3900 mut cx: AsyncAppContext,
3901 ) -> Result<proto::FormatBuffersResponse> {
3902 let sender_id = envelope.original_sender_id()?;
3903 let format = this.update(&mut cx, |this, cx| {
3904 let mut buffers = HashSet::default();
3905 for buffer_id in &envelope.payload.buffer_ids {
3906 buffers.insert(
3907 this.opened_buffers
3908 .get(buffer_id)
3909 .map(|buffer| buffer.upgrade(cx).unwrap())
3910 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3911 );
3912 }
3913 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3914 })?;
3915
3916 let project_transaction = format.await?;
3917 let project_transaction = this.update(&mut cx, |this, cx| {
3918 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3919 });
3920 Ok(proto::FormatBuffersResponse {
3921 transaction: Some(project_transaction),
3922 })
3923 }
3924
3925 async fn handle_get_completions(
3926 this: ModelHandle<Self>,
3927 envelope: TypedEnvelope<proto::GetCompletions>,
3928 _: Arc<Client>,
3929 mut cx: AsyncAppContext,
3930 ) -> Result<proto::GetCompletionsResponse> {
3931 let position = envelope
3932 .payload
3933 .position
3934 .and_then(language::proto::deserialize_anchor)
3935 .ok_or_else(|| anyhow!("invalid position"))?;
3936 let version = deserialize_version(envelope.payload.version);
3937 let buffer = this.read_with(&cx, |this, cx| {
3938 this.opened_buffers
3939 .get(&envelope.payload.buffer_id)
3940 .map(|buffer| buffer.upgrade(cx).unwrap())
3941 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3942 })?;
3943 buffer
3944 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3945 .await;
3946 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3947 let completions = this
3948 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3949 .await?;
3950
3951 Ok(proto::GetCompletionsResponse {
3952 completions: completions
3953 .iter()
3954 .map(language::proto::serialize_completion)
3955 .collect(),
3956 version: serialize_version(&version),
3957 })
3958 }
3959
3960 async fn handle_apply_additional_edits_for_completion(
3961 this: ModelHandle<Self>,
3962 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3963 _: Arc<Client>,
3964 mut cx: AsyncAppContext,
3965 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3966 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3967 let buffer = this
3968 .opened_buffers
3969 .get(&envelope.payload.buffer_id)
3970 .map(|buffer| buffer.upgrade(cx).unwrap())
3971 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3972 let language = buffer.read(cx).language();
3973 let completion = language::proto::deserialize_completion(
3974 envelope
3975 .payload
3976 .completion
3977 .ok_or_else(|| anyhow!("invalid completion"))?,
3978 language,
3979 )?;
3980 Ok::<_, anyhow::Error>(
3981 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3982 )
3983 })?;
3984
3985 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3986 transaction: apply_additional_edits
3987 .await?
3988 .as_ref()
3989 .map(language::proto::serialize_transaction),
3990 })
3991 }
3992
3993 async fn handle_get_code_actions(
3994 this: ModelHandle<Self>,
3995 envelope: TypedEnvelope<proto::GetCodeActions>,
3996 _: Arc<Client>,
3997 mut cx: AsyncAppContext,
3998 ) -> Result<proto::GetCodeActionsResponse> {
3999 let start = envelope
4000 .payload
4001 .start
4002 .and_then(language::proto::deserialize_anchor)
4003 .ok_or_else(|| anyhow!("invalid start"))?;
4004 let end = envelope
4005 .payload
4006 .end
4007 .and_then(language::proto::deserialize_anchor)
4008 .ok_or_else(|| anyhow!("invalid end"))?;
4009 let buffer = this.update(&mut cx, |this, cx| {
4010 this.opened_buffers
4011 .get(&envelope.payload.buffer_id)
4012 .map(|buffer| buffer.upgrade(cx).unwrap())
4013 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4014 })?;
4015 buffer
4016 .update(&mut cx, |buffer, _| {
4017 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4018 })
4019 .await;
4020
4021 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4022 let code_actions = this.update(&mut cx, |this, cx| {
4023 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4024 })?;
4025
4026 Ok(proto::GetCodeActionsResponse {
4027 actions: code_actions
4028 .await?
4029 .iter()
4030 .map(language::proto::serialize_code_action)
4031 .collect(),
4032 version: serialize_version(&version),
4033 })
4034 }
4035
4036 async fn handle_apply_code_action(
4037 this: ModelHandle<Self>,
4038 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4039 _: Arc<Client>,
4040 mut cx: AsyncAppContext,
4041 ) -> Result<proto::ApplyCodeActionResponse> {
4042 let sender_id = envelope.original_sender_id()?;
4043 let action = language::proto::deserialize_code_action(
4044 envelope
4045 .payload
4046 .action
4047 .ok_or_else(|| anyhow!("invalid action"))?,
4048 )?;
4049 let apply_code_action = this.update(&mut cx, |this, cx| {
4050 let buffer = this
4051 .opened_buffers
4052 .get(&envelope.payload.buffer_id)
4053 .map(|buffer| buffer.upgrade(cx).unwrap())
4054 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4055 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4056 })?;
4057
4058 let project_transaction = apply_code_action.await?;
4059 let project_transaction = this.update(&mut cx, |this, cx| {
4060 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4061 });
4062 Ok(proto::ApplyCodeActionResponse {
4063 transaction: Some(project_transaction),
4064 })
4065 }
4066
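    // Generic handler for LSP-backed requests proxied from guests: the proto
    // request is deserialized into a typed `LspCommand`, run against the local
    // language server, and the response is serialized back for the sender.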
4067 async fn handle_lsp_command<T: LspCommand>(
4068 this: ModelHandle<Self>,
4069 envelope: TypedEnvelope<T::ProtoRequest>,
4070 _: Arc<Client>,
4071 mut cx: AsyncAppContext,
4072 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4073 where
4074 <T::LspRequest as lsp::request::Request>::Result: Send,
4075 {
4076 let sender_id = envelope.original_sender_id()?;
4077 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4078 let buffer_handle = this.read_with(&cx, |this, _| {
4079 this.opened_buffers
4080 .get(&buffer_id)
4081 .and_then(|buffer| buffer.upgrade(&cx))
4082 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4083 })?;
4084 let request = T::from_proto(
4085 envelope.payload,
4086 this.clone(),
4087 buffer_handle.clone(),
4088 cx.clone(),
4089 )
4090 .await?;
4091 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4092 let response = this
4093 .update(&mut cx, |this, cx| {
4094 this.request_lsp(buffer_handle, request, cx)
4095 })
4096 .await?;
4097 this.update(&mut cx, |this, cx| {
4098 Ok(T::response_to_proto(
4099 response,
4100 this,
4101 sender_id,
4102 &buffer_version,
4103 cx,
4104 ))
4105 })
4106 }
4107
4108 async fn handle_get_project_symbols(
4109 this: ModelHandle<Self>,
4110 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4111 _: Arc<Client>,
4112 mut cx: AsyncAppContext,
4113 ) -> Result<proto::GetProjectSymbolsResponse> {
4114 let symbols = this
4115 .update(&mut cx, |this, cx| {
4116 this.symbols(&envelope.payload.query, cx)
4117 })
4118 .await?;
4119
4120 Ok(proto::GetProjectSymbolsResponse {
4121 symbols: symbols.iter().map(serialize_symbol).collect(),
4122 })
4123 }
4124
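    // Runs a project-wide search on behalf of a peer and serializes each
    // matching buffer and range as a `proto::Location`.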
4125 async fn handle_search_project(
4126 this: ModelHandle<Self>,
4127 envelope: TypedEnvelope<proto::SearchProject>,
4128 _: Arc<Client>,
4129 mut cx: AsyncAppContext,
4130 ) -> Result<proto::SearchProjectResponse> {
4131 let peer_id = envelope.original_sender_id()?;
4132 let query = SearchQuery::from_proto(envelope.payload)?;
4133 let result = this
4134 .update(&mut cx, |this, cx| this.search(query, cx))
4135 .await?;
4136
4137 this.update(&mut cx, |this, cx| {
4138 let mut locations = Vec::new();
4139 for (buffer, ranges) in result {
4140 for range in ranges {
4141 let start = serialize_anchor(&range.start);
4142 let end = serialize_anchor(&range.end);
4143 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4144 locations.push(proto::Location {
4145 buffer: Some(buffer),
4146 start: Some(start),
4147 end: Some(end),
4148 });
4149 }
4150 }
4151 Ok(proto::SearchProjectResponse { locations })
4152 })
4153 }
4154
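    // Opens the buffer containing a symbol previously sent to a guest. The
    // symbol's signature is recomputed and verified before the buffer is opened.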
4155 async fn handle_open_buffer_for_symbol(
4156 this: ModelHandle<Self>,
4157 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4158 _: Arc<Client>,
4159 mut cx: AsyncAppContext,
4160 ) -> Result<proto::OpenBufferForSymbolResponse> {
4161 let peer_id = envelope.original_sender_id()?;
4162 let symbol = envelope
4163 .payload
4164 .symbol
4165 .ok_or_else(|| anyhow!("invalid symbol"))?;
4166 let symbol = this.read_with(&cx, |this, _| {
4167 let symbol = this.deserialize_symbol(symbol)?;
4168 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4169 if signature == symbol.signature {
4170 Ok(symbol)
4171 } else {
4172 Err(anyhow!("invalid symbol signature"))
4173 }
4174 })?;
4175 let buffer = this
4176 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4177 .await?;
4178
4179 Ok(proto::OpenBufferForSymbolResponse {
4180 buffer: Some(this.update(&mut cx, |this, cx| {
4181 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4182 })),
4183 })
4184 }
4185
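    // Derives a signature for a symbol from its worktree id, path, and this
    // project's nonce, so symbols echoed back by peers can be verified.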
4186 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4187 let mut hasher = Sha256::new();
4188 hasher.update(worktree_id.to_proto().to_be_bytes());
4189 hasher.update(path.to_string_lossy().as_bytes());
4190 hasher.update(self.nonce.to_be_bytes());
4191 hasher.finalize().as_slice().try_into().unwrap()
4192 }
4193
4194 async fn handle_open_buffer_by_id(
4195 this: ModelHandle<Self>,
4196 envelope: TypedEnvelope<proto::OpenBufferById>,
4197 _: Arc<Client>,
4198 mut cx: AsyncAppContext,
4199 ) -> Result<proto::OpenBufferResponse> {
4200 let peer_id = envelope.original_sender_id()?;
4201 let buffer = this
4202 .update(&mut cx, |this, cx| {
4203 this.open_buffer_by_id(envelope.payload.id, cx)
4204 })
4205 .await?;
4206 this.update(&mut cx, |this, cx| {
4207 Ok(proto::OpenBufferResponse {
4208 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4209 })
4210 })
4211 }
4212
4213 async fn handle_open_buffer_by_path(
4214 this: ModelHandle<Self>,
4215 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4216 _: Arc<Client>,
4217 mut cx: AsyncAppContext,
4218 ) -> Result<proto::OpenBufferResponse> {
4219 let peer_id = envelope.original_sender_id()?;
4220 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4221 let open_buffer = this.update(&mut cx, |this, cx| {
4222 this.open_buffer(
4223 ProjectPath {
4224 worktree_id,
4225 path: PathBuf::from(envelope.payload.path).into(),
4226 },
4227 cx,
4228 )
4229 });
4230
4231 let buffer = open_buffer.await?;
4232 this.update(&mut cx, |this, cx| {
4233 Ok(proto::OpenBufferResponse {
4234 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4235 })
4236 })
4237 }
4238
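    // Converts a `ProjectTransaction` into its wire representation, serializing
    // each affected buffer for the given peer alongside its transaction.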
4239 fn serialize_project_transaction_for_peer(
4240 &mut self,
4241 project_transaction: ProjectTransaction,
4242 peer_id: PeerId,
4243 cx: &AppContext,
4244 ) -> proto::ProjectTransaction {
4245 let mut serialized_transaction = proto::ProjectTransaction {
4246 buffers: Default::default(),
4247 transactions: Default::default(),
4248 };
4249 for (buffer, transaction) in project_transaction.0 {
4250 serialized_transaction
4251 .buffers
4252 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4253 serialized_transaction
4254 .transactions
4255 .push(language::proto::serialize_transaction(&transaction));
4256 }
4257 serialized_transaction
4258 }
4259
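    // Rebuilds a `ProjectTransaction` received from a peer: each buffer is
    // resolved locally, the edits referenced by its transaction are awaited,
    // and the transaction is optionally pushed onto the buffer's undo history.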
4260 fn deserialize_project_transaction(
4261 &mut self,
4262 message: proto::ProjectTransaction,
4263 push_to_history: bool,
4264 cx: &mut ModelContext<Self>,
4265 ) -> Task<Result<ProjectTransaction>> {
4266 cx.spawn(|this, mut cx| async move {
4267 let mut project_transaction = ProjectTransaction::default();
4268 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4269 let buffer = this
4270 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4271 .await?;
4272 let transaction = language::proto::deserialize_transaction(transaction)?;
4273 project_transaction.0.insert(buffer, transaction);
4274 }
4275
4276 for (buffer, transaction) in &project_transaction.0 {
4277 buffer
4278 .update(&mut cx, |buffer, _| {
4279 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4280 })
4281 .await;
4282
4283 if push_to_history {
4284 buffer.update(&mut cx, |buffer, _| {
4285 buffer.push_transaction(transaction.clone(), Instant::now());
4286 });
4287 }
4288 }
4289
4290 Ok(project_transaction)
4291 })
4292 }
4293
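    // The first time a buffer is sent to a given peer, its full state is
    // included; subsequent messages refer to it by id only.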
4294 fn serialize_buffer_for_peer(
4295 &mut self,
4296 buffer: &ModelHandle<Buffer>,
4297 peer_id: PeerId,
4298 cx: &AppContext,
4299 ) -> proto::Buffer {
4300 let buffer_id = buffer.read(cx).remote_id();
4301 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4302 if shared_buffers.insert(buffer_id) {
4303 proto::Buffer {
4304 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4305 }
4306 } else {
4307 proto::Buffer {
4308 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4309 }
4310 }
4311 }
4312
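    // Resolves a `proto::Buffer` into a local buffer handle, either by waiting
    // for a buffer with the given id to finish opening or by constructing and
    // registering a new buffer from the serialized state.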
4313 fn deserialize_buffer(
4314 &mut self,
4315 buffer: proto::Buffer,
4316 cx: &mut ModelContext<Self>,
4317 ) -> Task<Result<ModelHandle<Buffer>>> {
4318 let replica_id = self.replica_id();
4319
4320 let opened_buffer_tx = self.opened_buffer.0.clone();
4321 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4322 cx.spawn(|this, mut cx| async move {
4323 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4324 proto::buffer::Variant::Id(id) => {
4325 let buffer = loop {
4326 let buffer = this.read_with(&cx, |this, cx| {
4327 this.opened_buffers
4328 .get(&id)
4329 .and_then(|buffer| buffer.upgrade(cx))
4330 });
4331 if let Some(buffer) = buffer {
4332 break buffer;
4333 }
4334 opened_buffer_rx
4335 .next()
4336 .await
4337 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4338 };
4339 Ok(buffer)
4340 }
4341 proto::buffer::Variant::State(mut buffer) => {
4342 let mut buffer_worktree = None;
4343 let mut buffer_file = None;
4344 if let Some(file) = buffer.file.take() {
4345 this.read_with(&cx, |this, cx| {
4346 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4347 let worktree =
4348 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4349 anyhow!("no worktree found for id {}", file.worktree_id)
4350 })?;
4351 buffer_file =
4352 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4353 as Box<dyn language::File>);
4354 buffer_worktree = Some(worktree);
4355 Ok::<_, anyhow::Error>(())
4356 })?;
4357 }
4358
4359 let buffer = cx.add_model(|cx| {
4360 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4361 });
4362
4363 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4364
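                    // Publish to the `opened_buffer` watch so any tasks waiting
                    // above on an id-only buffer re-check the open buffers map.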
4365 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4366 Ok(buffer)
4367 }
4368 }
4369 })
4370 }
4371
4372 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4373 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4374 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4375 let start = serialized_symbol
4376 .start
4377 .ok_or_else(|| anyhow!("invalid start"))?;
4378 let end = serialized_symbol
4379 .end
4380 .ok_or_else(|| anyhow!("invalid end"))?;
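        // NOTE: this assumes the serialized `kind` shares the same integer
        // representation as the LSP symbol kind (see `serialize_symbol` below).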
4381 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4382 let path = PathBuf::from(serialized_symbol.path);
4383 let language = self.languages.select_language(&path);
4384 Ok(Symbol {
4385 source_worktree_id,
4386 worktree_id,
4387 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4388 label: language
4389 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4390 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4391 name: serialized_symbol.name,
4392 path,
4393 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4394 kind,
4395 signature: serialized_symbol
4396 .signature
4397 .try_into()
4398 .map_err(|_| anyhow!("invalid signature"))?,
4399 })
4400 }
4401
4402 async fn handle_buffer_saved(
4403 this: ModelHandle<Self>,
4404 envelope: TypedEnvelope<proto::BufferSaved>,
4405 _: Arc<Client>,
4406 mut cx: AsyncAppContext,
4407 ) -> Result<()> {
4408 let version = deserialize_version(envelope.payload.version);
4409 let mtime = envelope
4410 .payload
4411 .mtime
4412 .ok_or_else(|| anyhow!("missing mtime"))?
4413 .into();
4414
4415 this.update(&mut cx, |this, cx| {
4416 let buffer = this
4417 .opened_buffers
4418 .get(&envelope.payload.buffer_id)
4419 .and_then(|buffer| buffer.upgrade(cx));
4420 if let Some(buffer) = buffer {
4421 buffer.update(cx, |buffer, cx| {
4422 buffer.did_save(version, mtime, None, cx);
4423 });
4424 }
4425 Ok(())
4426 })
4427 }
4428
4429 async fn handle_buffer_reloaded(
4430 this: ModelHandle<Self>,
4431 envelope: TypedEnvelope<proto::BufferReloaded>,
4432 _: Arc<Client>,
4433 mut cx: AsyncAppContext,
4434 ) -> Result<()> {
4435 let payload = envelope.payload.clone();
4436 let version = deserialize_version(payload.version);
4437 let mtime = payload
4438 .mtime
4439 .ok_or_else(|| anyhow!("missing mtime"))?
4440 .into();
4441 this.update(&mut cx, |this, cx| {
4442 let buffer = this
4443 .opened_buffers
4444 .get(&payload.buffer_id)
4445 .and_then(|buffer| buffer.upgrade(cx));
4446 if let Some(buffer) = buffer {
4447 buffer.update(cx, |buffer, cx| {
4448 buffer.did_reload(version, mtime, cx);
4449 });
4450 }
4451 Ok(())
4452 })
4453 }
4454
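    // Fuzzy-matches `query` against the paths of all visible worktrees on the
    // background executor. Root names are included in the match candidates
    // only when more than one worktree is open.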
4455 pub fn match_paths<'a>(
4456 &self,
4457 query: &'a str,
4458 include_ignored: bool,
4459 smart_case: bool,
4460 max_results: usize,
4461 cancel_flag: &'a AtomicBool,
4462 cx: &AppContext,
4463 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4464 let worktrees = self
4465 .worktrees(cx)
4466 .filter(|worktree| worktree.read(cx).is_visible())
4467 .collect::<Vec<_>>();
4468 let include_root_name = worktrees.len() > 1;
4469 let candidate_sets = worktrees
4470 .into_iter()
4471 .map(|worktree| CandidateSet {
4472 snapshot: worktree.read(cx).snapshot(),
4473 include_ignored,
4474 include_root_name,
4475 })
4476 .collect::<Vec<_>>();
4477
4478 let background = cx.background().clone();
4479 async move {
4480 fuzzy::match_paths(
4481 candidate_sets.as_slice(),
4482 query,
4483 smart_case,
4484 max_results,
4485 cancel_flag,
4486 background,
4487 )
4488 .await
4489 }
4490 }
4491
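    // Converts LSP text edits into `(Range<Anchor>, String)` replacements
    // against the buffer snapshot that corresponds to the version the language
    // server was looking at. Adjacent and newline-separated edits are merged,
    // and multi-line replacements are diffed so anchors in unchanged regions
    // stay put.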
4492 fn edits_from_lsp(
4493 &mut self,
4494 buffer: &ModelHandle<Buffer>,
4495 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4496 version: Option<i32>,
4497 cx: &mut ModelContext<Self>,
4498 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4499 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4500 cx.background().spawn(async move {
4501 let snapshot = snapshot?;
4502 let mut lsp_edits = lsp_edits
4503 .into_iter()
4504 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4505 .peekable();
4506
4507 let mut edits = Vec::new();
4508 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4509 // Combine any LSP edits that are adjacent.
4510 //
4511 // Also, combine LSP edits that are separated from each other by only
4512 // a newline. This is important because for some code actions,
4513 // Rust-analyzer rewrites the entire buffer via a series of edits that
4514 // are separated by unchanged newline characters.
4515 //
4516 // In order for the diffing logic below to work properly, any edits that
4517 // cancel each other out must be combined into one.
4518 while let Some((next_range, next_text)) = lsp_edits.peek() {
4519 if next_range.start > range.end {
4520 if next_range.start.row > range.end.row + 1
4521 || next_range.start.column > 0
4522 || snapshot.clip_point_utf16(
4523 PointUtf16::new(range.end.row, u32::MAX),
4524 Bias::Left,
4525 ) > range.end
4526 {
4527 break;
4528 }
4529 new_text.push('\n');
4530 }
4531 range.end = next_range.end;
4532 new_text.push_str(&next_text);
4533 lsp_edits.next();
4534 }
4535
4536 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4537 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4538 {
4539 return Err(anyhow!("invalid edits received from language server"));
4540 }
4541
4542 // For multiline edits, perform a diff of the old and new text so that
4543 // we can identify the changes more precisely, preserving the locations
4544 // of any anchors positioned in the unchanged regions.
4545 if range.end.row > range.start.row {
4546 let mut offset = range.start.to_offset(&snapshot);
4547 let old_text = snapshot.text_for_range(range).collect::<String>();
4548
4549 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4550 let mut moved_since_edit = true;
4551 for change in diff.iter_all_changes() {
4552 let tag = change.tag();
4553 let value = change.value();
4554 match tag {
4555 ChangeTag::Equal => {
4556 offset += value.len();
4557 moved_since_edit = true;
4558 }
4559 ChangeTag::Delete => {
4560 let start = snapshot.anchor_after(offset);
4561 let end = snapshot.anchor_before(offset + value.len());
4562 if moved_since_edit {
4563 edits.push((start..end, String::new()));
4564 } else {
4565 edits.last_mut().unwrap().0.end = end;
4566 }
4567 offset += value.len();
4568 moved_since_edit = false;
4569 }
4570 ChangeTag::Insert => {
4571 if moved_since_edit {
4572 let anchor = snapshot.anchor_after(offset);
4573 edits.push((anchor.clone()..anchor, value.to_string()));
4574 } else {
4575 edits.last_mut().unwrap().1.push_str(value);
4576 }
4577 moved_since_edit = false;
4578 }
4579 }
4580 }
4581 } else if range.end == range.start {
4582 let anchor = snapshot.anchor_after(range.start);
4583 edits.push((anchor.clone()..anchor, new_text));
4584 } else {
4585 let edit_start = snapshot.anchor_after(range.start);
4586 let edit_end = snapshot.anchor_before(range.end);
4587 edits.push((edit_start..edit_end, new_text));
4588 }
4589 }
4590
4591 Ok(edits)
4592 })
4593 }
4594
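    // Returns the snapshot of `buffer` matching the given LSP document version,
    // pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
    // the requested one. With no version, the current snapshot is returned.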
4595 fn buffer_snapshot_for_lsp_version(
4596 &mut self,
4597 buffer: &ModelHandle<Buffer>,
4598 version: Option<i32>,
4599 cx: &AppContext,
4600 ) -> Result<TextBufferSnapshot> {
4601 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4602
4603 if let Some(version) = version {
4604 let buffer_id = buffer.read(cx).remote_id();
4605 let snapshots = self
4606 .buffer_snapshots
4607 .get_mut(&buffer_id)
4608 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4609 let mut found_snapshot = None;
4610 snapshots.retain(|(snapshot_version, snapshot)| {
4611 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4612 false
4613 } else {
4614 if *snapshot_version == version {
4615 found_snapshot = Some(snapshot.clone());
4616 }
4617 true
4618 }
4619 });
4620
4621 found_snapshot.ok_or_else(|| {
4622 anyhow!(
4623 "snapshot not found for buffer {} at version {}",
4624 buffer_id,
4625 version
4626 )
4627 })
4628 } else {
            Ok(buffer.read(cx).text_snapshot())
4630 }
4631 }
4632
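    // Returns the adapter and language server responsible for `buffer`, if one
    // has been started for the buffer's worktree and language.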
4633 fn language_server_for_buffer(
4634 &self,
4635 buffer: &Buffer,
4636 cx: &AppContext,
4637 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4638 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4639 let worktree_id = file.worktree_id(cx);
4640 self.language_servers
4641 .get(&(worktree_id, language.lsp_adapter()?.name()))
4642 } else {
4643 None
4644 }
4645 }
4646}
4647
4648impl WorktreeHandle {
4649 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4650 match self {
4651 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4652 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4653 }
4654 }
4655}
4656
4657impl OpenBuffer {
4658 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4659 match self {
4660 OpenBuffer::Strong(handle) => Some(handle.clone()),
4661 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4662 OpenBuffer::Loading(_) => None,
4663 }
4664 }
4665}
4666
4667struct CandidateSet {
4668 snapshot: Snapshot,
4669 include_ignored: bool,
4670 include_root_name: bool,
4671}
4672
4673impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4674 type Candidates = CandidateSetIter<'a>;
4675
4676 fn id(&self) -> usize {
4677 self.snapshot.id().to_usize()
4678 }
4679
4680 fn len(&self) -> usize {
4681 if self.include_ignored {
4682 self.snapshot.file_count()
4683 } else {
4684 self.snapshot.visible_file_count()
4685 }
4686 }
4687
4688 fn prefix(&self) -> Arc<str> {
4689 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4690 self.snapshot.root_name().into()
4691 } else if self.include_root_name {
4692 format!("{}/", self.snapshot.root_name()).into()
4693 } else {
4694 "".into()
4695 }
4696 }
4697
4698 fn candidates(&'a self, start: usize) -> Self::Candidates {
4699 CandidateSetIter {
4700 traversal: self.snapshot.files(self.include_ignored, start),
4701 }
4702 }
4703}
4704
4705struct CandidateSetIter<'a> {
4706 traversal: Traversal<'a>,
4707}
4708
4709impl<'a> Iterator for CandidateSetIter<'a> {
4710 type Item = PathMatchCandidate<'a>;
4711
4712 fn next(&mut self) -> Option<Self::Item> {
4713 self.traversal.next().map(|entry| {
4714 if let EntryKind::File(char_bag) = entry.kind {
4715 PathMatchCandidate {
4716 path: &entry.path,
4717 char_bag,
4718 }
4719 } else {
4720 unreachable!()
4721 }
4722 })
4723 }
4724}
4725
4726impl Entity for Project {
4727 type Event = Event;
4728
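    // When the project model is released, unregister it (if hosting) or leave
    // it (if joined remotely).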
4729 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4730 match &self.client_state {
4731 ProjectClientState::Local { remote_id_rx, .. } => {
4732 if let Some(project_id) = *remote_id_rx.borrow() {
4733 self.client
4734 .send(proto::UnregisterProject { project_id })
4735 .log_err();
4736 }
4737 }
4738 ProjectClientState::Remote { remote_id, .. } => {
4739 self.client
4740 .send(proto::LeaveProject {
4741 project_id: *remote_id,
4742 })
4743 .log_err();
4744 }
4745 }
4746 }
4747
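    // Shut down all running language servers before the application exits.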
4748 fn app_will_quit(
4749 &mut self,
4750 _: &mut MutableAppContext,
4751 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4752 let shutdown_futures = self
4753 .language_servers
4754 .drain()
4755 .filter_map(|(_, (_, server))| server.shutdown())
4756 .collect::<Vec<_>>();
4757 Some(
4758 async move {
4759 futures::future::join_all(shutdown_futures).await;
4760 }
4761 .boxed(),
4762 )
4763 }
4764}
4765
4766impl Collaborator {
4767 fn from_proto(
4768 message: proto::Collaborator,
4769 user_store: &ModelHandle<UserStore>,
4770 cx: &mut AsyncAppContext,
4771 ) -> impl Future<Output = Result<Self>> {
4772 let user = user_store.update(cx, |user_store, cx| {
4773 user_store.fetch_user(message.user_id, cx)
4774 });
4775
4776 async move {
4777 Ok(Self {
4778 peer_id: PeerId(message.peer_id),
4779 user: user.await?,
4780 replica_id: message.replica_id as ReplicaId,
4781 })
4782 }
4783 }
4784}
4785
4786impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4787 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4788 Self {
4789 worktree_id,
4790 path: path.as_ref().into(),
4791 }
4792 }
4793}
4794
4795impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4796 fn from(options: lsp::CreateFileOptions) -> Self {
4797 Self {
4798 overwrite: options.overwrite.unwrap_or(false),
4799 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4800 }
4801 }
4802}
4803
4804impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4805 fn from(options: lsp::RenameFileOptions) -> Self {
4806 Self {
4807 overwrite: options.overwrite.unwrap_or(false),
4808 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4809 }
4810 }
4811}
4812
4813impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4814 fn from(options: lsp::DeleteFileOptions) -> Self {
4815 Self {
4816 recursive: options.recursive.unwrap_or(false),
4817 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4818 }
4819 }
4820}
4821
4822fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4823 proto::Symbol {
4824 source_worktree_id: symbol.source_worktree_id.to_proto(),
4825 worktree_id: symbol.worktree_id.to_proto(),
4826 language_server_name: symbol.language_server_name.0.to_string(),
4827 name: symbol.name.clone(),
4828 kind: unsafe { mem::transmute(symbol.kind) },
4829 path: symbol.path.to_string_lossy().to_string(),
4830 start: Some(proto::Point {
4831 row: symbol.range.start.row,
4832 column: symbol.range.start.column,
4833 }),
4834 end: Some(proto::Point {
4835 row: symbol.range.end.row,
4836 column: symbol.range.end.column,
4837 }),
4838 signature: symbol.signature.to_vec(),
4839 }
4840}
4841
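// Computes the path that, when joined onto `base`, points at `path`; e.g.
// relativize_path(Path::new("/a/b"), Path::new("/a/c/d")) == PathBuf::from("../c/d").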
4842fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4843 let mut path_components = path.components();
4844 let mut base_components = base.components();
4845 let mut components: Vec<Component> = Vec::new();
4846 loop {
4847 match (path_components.next(), base_components.next()) {
4848 (None, None) => break,
4849 (Some(a), None) => {
4850 components.push(a);
4851 components.extend(path_components.by_ref());
4852 break;
4853 }
4854 (None, _) => components.push(Component::ParentDir),
4855 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4856 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4857 (Some(a), Some(_)) => {
4858 components.push(Component::ParentDir);
4859 for _ in base_components {
4860 components.push(Component::ParentDir);
4861 }
4862 components.push(a);
4863 components.extend(path_components.by_ref());
4864 break;
4865 }
4866 }
4867 }
4868 components.iter().map(|c| c.as_os_str()).collect()
4869}
4870
4871impl Item for Buffer {
4872 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4873 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4874 }
4875}
4876
4877#[cfg(test)]
4878mod tests {
4879 use super::{Event, *};
4880 use fs::RealFs;
4881 use futures::{future, StreamExt};
4882 use gpui::test::subscribe;
4883 use language::{
4884 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4885 ToPoint,
4886 };
4887 use lsp::Url;
4888 use serde_json::json;
4889 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4890 use unindent::Unindent as _;
4891 use util::{assert_set_eq, test::temp_tree};
4892 use worktree::WorktreeHandle as _;
4893
4894 #[gpui::test]
4895 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4896 let dir = temp_tree(json!({
4897 "root": {
4898 "apple": "",
4899 "banana": {
4900 "carrot": {
4901 "date": "",
4902 "endive": "",
4903 }
4904 },
4905 "fennel": {
4906 "grape": "",
4907 }
4908 }
4909 }));
4910
4911 let root_link_path = dir.path().join("root_link");
4912 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4913 unix::fs::symlink(
4914 &dir.path().join("root/fennel"),
4915 &dir.path().join("root/finnochio"),
4916 )
4917 .unwrap();
4918
4919 let project = Project::test(Arc::new(RealFs), cx);
4920
4921 let (tree, _) = project
4922 .update(cx, |project, cx| {
4923 project.find_or_create_local_worktree(&root_link_path, true, cx)
4924 })
4925 .await
4926 .unwrap();
4927
4928 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4929 .await;
4930 cx.read(|cx| {
4931 let tree = tree.read(cx);
4932 assert_eq!(tree.file_count(), 5);
4933 assert_eq!(
4934 tree.inode_for_path("fennel/grape"),
4935 tree.inode_for_path("finnochio/grape")
4936 );
4937 });
4938
4939 let cancel_flag = Default::default();
4940 let results = project
4941 .read_with(cx, |project, cx| {
4942 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4943 })
4944 .await;
4945 assert_eq!(
4946 results
4947 .into_iter()
4948 .map(|result| result.path)
4949 .collect::<Vec<Arc<Path>>>(),
4950 vec![
4951 PathBuf::from("banana/carrot/date").into(),
4952 PathBuf::from("banana/carrot/endive").into(),
4953 ]
4954 );
4955 }
4956
4957 #[gpui::test]
4958 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4959 cx.foreground().forbid_parking();
4960
4961 let mut rust_language = Language::new(
4962 LanguageConfig {
4963 name: "Rust".into(),
4964 path_suffixes: vec!["rs".to_string()],
4965 ..Default::default()
4966 },
4967 Some(tree_sitter_rust::language()),
4968 );
4969 let mut json_language = Language::new(
4970 LanguageConfig {
4971 name: "JSON".into(),
4972 path_suffixes: vec!["json".to_string()],
4973 ..Default::default()
4974 },
4975 None,
4976 );
4977 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4978 name: "the-rust-language-server",
4979 capabilities: lsp::ServerCapabilities {
4980 completion_provider: Some(lsp::CompletionOptions {
4981 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4982 ..Default::default()
4983 }),
4984 ..Default::default()
4985 },
4986 ..Default::default()
4987 });
4988 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4989 name: "the-json-language-server",
4990 capabilities: lsp::ServerCapabilities {
4991 completion_provider: Some(lsp::CompletionOptions {
4992 trigger_characters: Some(vec![":".to_string()]),
4993 ..Default::default()
4994 }),
4995 ..Default::default()
4996 },
4997 ..Default::default()
4998 });
4999
5000 let fs = FakeFs::new(cx.background());
5001 fs.insert_tree(
5002 "/the-root",
5003 json!({
5004 "test.rs": "const A: i32 = 1;",
5005 "test2.rs": "",
5006 "Cargo.toml": "a = 1",
5007 "package.json": "{\"a\": 1}",
5008 }),
5009 )
5010 .await;
5011
5012 let project = Project::test(fs.clone(), cx);
5013 project.update(cx, |project, _| {
5014 project.languages.add(Arc::new(rust_language));
5015 project.languages.add(Arc::new(json_language));
5016 });
5017
5018 let worktree_id = project
5019 .update(cx, |project, cx| {
5020 project.find_or_create_local_worktree("/the-root", true, cx)
5021 })
5022 .await
5023 .unwrap()
5024 .0
5025 .read_with(cx, |tree, _| tree.id());
5026
5027 // Open a buffer without an associated language server.
5028 let toml_buffer = project
5029 .update(cx, |project, cx| {
5030 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5031 })
5032 .await
5033 .unwrap();
5034
5035 // Open a buffer with an associated language server.
5036 let rust_buffer = project
5037 .update(cx, |project, cx| {
5038 project.open_buffer((worktree_id, "test.rs"), cx)
5039 })
5040 .await
5041 .unwrap();
5042
5043 // A server is started up, and it is notified about Rust files.
5044 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5045 assert_eq!(
5046 fake_rust_server
5047 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5048 .await
5049 .text_document,
5050 lsp::TextDocumentItem {
5051 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5052 version: 0,
5053 text: "const A: i32 = 1;".to_string(),
5054 language_id: Default::default()
5055 }
5056 );
5057
5058 // The buffer is configured based on the language server's capabilities.
5059 rust_buffer.read_with(cx, |buffer, _| {
5060 assert_eq!(
5061 buffer.completion_triggers(),
5062 &[".".to_string(), "::".to_string()]
5063 );
5064 });
5065 toml_buffer.read_with(cx, |buffer, _| {
5066 assert!(buffer.completion_triggers().is_empty());
5067 });
5068
5069 // Edit a buffer. The changes are reported to the language server.
5070 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5071 assert_eq!(
5072 fake_rust_server
5073 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5074 .await
5075 .text_document,
5076 lsp::VersionedTextDocumentIdentifier::new(
5077 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5078 1
5079 )
5080 );
5081
5082 // Open a third buffer with a different associated language server.
5083 let json_buffer = project
5084 .update(cx, |project, cx| {
5085 project.open_buffer((worktree_id, "package.json"), cx)
5086 })
5087 .await
5088 .unwrap();
5089
5090 // A json language server is started up and is only notified about the json buffer.
5091 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5092 assert_eq!(
5093 fake_json_server
5094 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5095 .await
5096 .text_document,
5097 lsp::TextDocumentItem {
5098 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5099 version: 0,
5100 text: "{\"a\": 1}".to_string(),
5101 language_id: Default::default()
5102 }
5103 );
5104
5105 // This buffer is configured based on the second language server's
5106 // capabilities.
5107 json_buffer.read_with(cx, |buffer, _| {
5108 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5109 });
5110
5111 // When opening another buffer whose language server is already running,
5112 // it is also configured based on the existing language server's capabilities.
5113 let rust_buffer2 = project
5114 .update(cx, |project, cx| {
5115 project.open_buffer((worktree_id, "test2.rs"), cx)
5116 })
5117 .await
5118 .unwrap();
5119 rust_buffer2.read_with(cx, |buffer, _| {
5120 assert_eq!(
5121 buffer.completion_triggers(),
5122 &[".".to_string(), "::".to_string()]
5123 );
5124 });
5125
5126 // Changes are reported only to servers matching the buffer's language.
5127 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5128 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5129 assert_eq!(
5130 fake_rust_server
5131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5132 .await
5133 .text_document,
5134 lsp::VersionedTextDocumentIdentifier::new(
5135 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5136 1
5137 )
5138 );
5139
5140 // Save notifications are reported to all servers.
5141 toml_buffer
5142 .update(cx, |buffer, cx| buffer.save(cx))
5143 .await
5144 .unwrap();
5145 assert_eq!(
5146 fake_rust_server
5147 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5148 .await
5149 .text_document,
5150 lsp::TextDocumentIdentifier::new(
5151 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5152 )
5153 );
5154 assert_eq!(
5155 fake_json_server
5156 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5157 .await
5158 .text_document,
5159 lsp::TextDocumentIdentifier::new(
5160 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5161 )
5162 );
5163
5164 // Renames are reported only to servers matching the buffer's language.
5165 fs.rename(
5166 Path::new("/the-root/test2.rs"),
5167 Path::new("/the-root/test3.rs"),
5168 Default::default(),
5169 )
5170 .await
5171 .unwrap();
5172 assert_eq!(
5173 fake_rust_server
5174 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5175 .await
5176 .text_document,
5177 lsp::TextDocumentIdentifier::new(
5178 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5179 ),
5180 );
5181 assert_eq!(
5182 fake_rust_server
5183 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5184 .await
5185 .text_document,
5186 lsp::TextDocumentItem {
5187 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5188 version: 0,
5189 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5190 language_id: Default::default()
5191 },
5192 );
5193
5194 rust_buffer2.update(cx, |buffer, cx| {
5195 buffer.update_diagnostics(
5196 DiagnosticSet::from_sorted_entries(
5197 vec![DiagnosticEntry {
5198 diagnostic: Default::default(),
5199 range: Anchor::MIN..Anchor::MAX,
5200 }],
5201 &buffer.snapshot(),
5202 ),
5203 cx,
5204 );
5205 assert_eq!(
5206 buffer
5207 .snapshot()
5208 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5209 .count(),
5210 1
5211 );
5212 });
5213
5214 // When the rename changes the extension of the file, the buffer gets closed on the old
5215 // language server and gets opened on the new one.
5216 fs.rename(
5217 Path::new("/the-root/test3.rs"),
5218 Path::new("/the-root/test3.json"),
5219 Default::default(),
5220 )
5221 .await
5222 .unwrap();
5223 assert_eq!(
5224 fake_rust_server
5225 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5226 .await
5227 .text_document,
5228 lsp::TextDocumentIdentifier::new(
5229 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5230 ),
5231 );
5232 assert_eq!(
5233 fake_json_server
5234 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5235 .await
5236 .text_document,
5237 lsp::TextDocumentItem {
5238 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5239 version: 0,
5240 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5241 language_id: Default::default()
5242 },
5243 );
5244 // We clear the diagnostics, since the language has changed.
5245 rust_buffer2.read_with(cx, |buffer, _| {
5246 assert_eq!(
5247 buffer
5248 .snapshot()
5249 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5250 .count(),
5251 0
5252 );
5253 });
5254
        // The renamed file's version resets after changing language servers.
5256 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5257 assert_eq!(
5258 fake_json_server
5259 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5260 .await
5261 .text_document,
5262 lsp::VersionedTextDocumentIdentifier::new(
5263 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5264 1
5265 )
5266 );
5267
5268 // Restart language servers
5269 project.update(cx, |project, cx| {
5270 project.restart_language_servers_for_buffers(
5271 vec![rust_buffer.clone(), json_buffer.clone()],
5272 cx,
5273 );
5274 });
5275
5276 let mut rust_shutdown_requests = fake_rust_server
5277 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5278 let mut json_shutdown_requests = fake_json_server
5279 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5280 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5281
5282 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5283 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5284
        // Ensure the rust document is reopened in the new rust language server
5286 assert_eq!(
5287 fake_rust_server
5288 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5289 .await
5290 .text_document,
5291 lsp::TextDocumentItem {
5292 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5293 version: 1,
5294 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5295 language_id: Default::default()
5296 }
5297 );
5298
        // Ensure the json documents are reopened in the new json language server
5300 assert_set_eq!(
5301 [
5302 fake_json_server
5303 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5304 .await
5305 .text_document,
5306 fake_json_server
5307 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5308 .await
5309 .text_document,
5310 ],
5311 [
5312 lsp::TextDocumentItem {
5313 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5314 version: 0,
5315 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5316 language_id: Default::default()
5317 },
5318 lsp::TextDocumentItem {
5319 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5320 version: 1,
5321 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5322 language_id: Default::default()
5323 }
5324 ]
5325 );
5326
5327 // Close notifications are reported only to servers matching the buffer's language.
5328 cx.update(|_| drop(json_buffer));
5329 let close_message = lsp::DidCloseTextDocumentParams {
5330 text_document: lsp::TextDocumentIdentifier::new(
5331 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5332 ),
5333 };
5334 assert_eq!(
5335 fake_json_server
5336 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5337 .await,
5338 close_message,
5339 );
5340 }
5341
5342 #[gpui::test]
5343 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5344 cx.foreground().forbid_parking();
5345
5346 let progress_token = "the-progress-token";
5347 let mut language = Language::new(
5348 LanguageConfig {
5349 name: "Rust".into(),
5350 path_suffixes: vec!["rs".to_string()],
5351 ..Default::default()
5352 },
5353 Some(tree_sitter_rust::language()),
5354 );
5355 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5356 disk_based_diagnostics_progress_token: Some(progress_token),
5357 disk_based_diagnostics_sources: &["disk"],
5358 ..Default::default()
5359 });
5360
5361 let fs = FakeFs::new(cx.background());
5362 fs.insert_tree(
5363 "/dir",
5364 json!({
5365 "a.rs": "fn a() { A }",
5366 "b.rs": "const y: i32 = 1",
5367 }),
5368 )
5369 .await;
5370
5371 let project = Project::test(fs, cx);
5372 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5373
5374 let (tree, _) = project
5375 .update(cx, |project, cx| {
5376 project.find_or_create_local_worktree("/dir", true, cx)
5377 })
5378 .await
5379 .unwrap();
5380 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5381
5382 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5383 .await;
5384
5385 // Cause worktree to start the fake language server
5386 let _buffer = project
5387 .update(cx, |project, cx| {
5388 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5389 })
5390 .await
5391 .unwrap();
5392
5393 let mut events = subscribe(&project, cx);
5394
5395 let mut fake_server = fake_servers.next().await.unwrap();
5396 fake_server.start_progress(progress_token).await;
5397 assert_eq!(
5398 events.next().await.unwrap(),
5399 Event::DiskBasedDiagnosticsStarted
5400 );
5401
5402 fake_server.start_progress(progress_token).await;
5403 fake_server.end_progress(progress_token).await;
5404 fake_server.start_progress(progress_token).await;
5405
5406 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5407 lsp::PublishDiagnosticsParams {
5408 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5409 version: None,
5410 diagnostics: vec![lsp::Diagnostic {
5411 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5412 severity: Some(lsp::DiagnosticSeverity::ERROR),
5413 message: "undefined variable 'A'".to_string(),
5414 ..Default::default()
5415 }],
5416 },
5417 );
5418 assert_eq!(
5419 events.next().await.unwrap(),
5420 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5421 );
5422
5423 fake_server.end_progress(progress_token).await;
5424 fake_server.end_progress(progress_token).await;
5425 assert_eq!(
5426 events.next().await.unwrap(),
5427 Event::DiskBasedDiagnosticsUpdated
5428 );
5429 assert_eq!(
5430 events.next().await.unwrap(),
5431 Event::DiskBasedDiagnosticsFinished
5432 );
5433
5434 let buffer = project
5435 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5436 .await
5437 .unwrap();
5438
5439 buffer.read_with(cx, |buffer, _| {
5440 let snapshot = buffer.snapshot();
5441 let diagnostics = snapshot
5442 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5443 .collect::<Vec<_>>();
5444 assert_eq!(
5445 diagnostics,
5446 &[DiagnosticEntry {
5447 range: Point::new(0, 9)..Point::new(0, 10),
5448 diagnostic: Diagnostic {
5449 severity: lsp::DiagnosticSeverity::ERROR,
5450 message: "undefined variable 'A'".to_string(),
5451 group_id: 0,
5452 is_primary: true,
5453 ..Default::default()
5454 }
5455 }]
5456 )
5457 });
5458 }
5459
5460 #[gpui::test]
5461 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5462 cx.foreground().forbid_parking();
5463
5464 let mut language = Language::new(
5465 LanguageConfig {
5466 name: "Rust".into(),
5467 path_suffixes: vec!["rs".to_string()],
5468 ..Default::default()
5469 },
5470 Some(tree_sitter_rust::language()),
5471 );
5472 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5473 disk_based_diagnostics_sources: &["disk"],
5474 ..Default::default()
5475 });
5476
5477 let text = "
5478 fn a() { A }
5479 fn b() { BB }
5480 fn c() { CCC }
5481 "
5482 .unindent();
5483
5484 let fs = FakeFs::new(cx.background());
5485 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5486
5487 let project = Project::test(fs, cx);
5488 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5489
5490 let worktree_id = project
5491 .update(cx, |project, cx| {
5492 project.find_or_create_local_worktree("/dir", true, cx)
5493 })
5494 .await
5495 .unwrap()
5496 .0
5497 .read_with(cx, |tree, _| tree.id());
5498
5499 let buffer = project
5500 .update(cx, |project, cx| {
5501 project.open_buffer((worktree_id, "a.rs"), cx)
5502 })
5503 .await
5504 .unwrap();
5505
5506 let mut fake_server = fake_servers.next().await.unwrap();
5507 let open_notification = fake_server
5508 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5509 .await;
5510
5511 // Edit the buffer, moving the content down
5512 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5513 let change_notification_1 = fake_server
5514 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5515 .await;
5516 assert!(
5517 change_notification_1.text_document.version > open_notification.text_document.version
5518 );
5519
5520 // Report some diagnostics for the initial version of the buffer
5521 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5522 lsp::PublishDiagnosticsParams {
5523 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5524 version: Some(open_notification.text_document.version),
5525 diagnostics: vec![
5526 lsp::Diagnostic {
5527 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5528 severity: Some(DiagnosticSeverity::ERROR),
5529 message: "undefined variable 'A'".to_string(),
5530 source: Some("disk".to_string()),
5531 ..Default::default()
5532 },
5533 lsp::Diagnostic {
5534 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5535 severity: Some(DiagnosticSeverity::ERROR),
5536 message: "undefined variable 'BB'".to_string(),
5537 source: Some("disk".to_string()),
5538 ..Default::default()
5539 },
5540 lsp::Diagnostic {
5541 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5542 severity: Some(DiagnosticSeverity::ERROR),
5543 source: Some("disk".to_string()),
5544 message: "undefined variable 'CCC'".to_string(),
5545 ..Default::default()
5546 },
5547 ],
5548 },
5549 );
5550
5551 // The diagnostics have moved down since they were created.
5552 buffer.next_notification(cx).await;
5553 buffer.read_with(cx, |buffer, _| {
5554 assert_eq!(
5555 buffer
5556 .snapshot()
5557 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5558 .collect::<Vec<_>>(),
5559 &[
5560 DiagnosticEntry {
5561 range: Point::new(3, 9)..Point::new(3, 11),
5562 diagnostic: Diagnostic {
5563 severity: DiagnosticSeverity::ERROR,
5564 message: "undefined variable 'BB'".to_string(),
5565 is_disk_based: true,
5566 group_id: 1,
5567 is_primary: true,
5568 ..Default::default()
5569 },
5570 },
5571 DiagnosticEntry {
5572 range: Point::new(4, 9)..Point::new(4, 12),
5573 diagnostic: Diagnostic {
5574 severity: DiagnosticSeverity::ERROR,
5575 message: "undefined variable 'CCC'".to_string(),
5576 is_disk_based: true,
5577 group_id: 2,
5578 is_primary: true,
5579 ..Default::default()
5580 }
5581 }
5582 ]
5583 );
5584 assert_eq!(
5585 chunks_with_diagnostics(buffer, 0..buffer.len()),
5586 [
5587 ("\n\nfn a() { ".to_string(), None),
5588 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5589 (" }\nfn b() { ".to_string(), None),
5590 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5591 (" }\nfn c() { ".to_string(), None),
5592 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5593 (" }\n".to_string(), None),
5594 ]
5595 );
5596 assert_eq!(
5597 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5598 [
5599 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5600 (" }\nfn c() { ".to_string(), None),
5601 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5602 ]
5603 );
5604 });
5605
5606 // Ensure overlapping diagnostics are highlighted correctly.
5607 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5608 lsp::PublishDiagnosticsParams {
5609 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5610 version: Some(open_notification.text_document.version),
5611 diagnostics: vec![
5612 lsp::Diagnostic {
5613 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5614 severity: Some(DiagnosticSeverity::ERROR),
5615 message: "undefined variable 'A'".to_string(),
5616 source: Some("disk".to_string()),
5617 ..Default::default()
5618 },
5619 lsp::Diagnostic {
5620 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5621 severity: Some(DiagnosticSeverity::WARNING),
5622 message: "unreachable statement".to_string(),
5623 source: Some("disk".to_string()),
5624 ..Default::default()
5625 },
5626 ],
5627 },
5628 );
5629
5630 buffer.next_notification(cx).await;
5631 buffer.read_with(cx, |buffer, _| {
5632 assert_eq!(
5633 buffer
5634 .snapshot()
5635 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5636 .collect::<Vec<_>>(),
5637 &[
5638 DiagnosticEntry {
5639 range: Point::new(2, 9)..Point::new(2, 12),
5640 diagnostic: Diagnostic {
5641 severity: DiagnosticSeverity::WARNING,
5642 message: "unreachable statement".to_string(),
5643 is_disk_based: true,
5644 group_id: 1,
5645 is_primary: true,
5646 ..Default::default()
5647 }
5648 },
5649 DiagnosticEntry {
5650 range: Point::new(2, 9)..Point::new(2, 10),
5651 diagnostic: Diagnostic {
5652 severity: DiagnosticSeverity::ERROR,
5653 message: "undefined variable 'A'".to_string(),
5654 is_disk_based: true,
5655 group_id: 0,
5656 is_primary: true,
5657 ..Default::default()
5658 },
5659 }
5660 ]
5661 );
5662 assert_eq!(
5663 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5664 [
5665 ("fn a() { ".to_string(), None),
5666 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5667 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5668 ("\n".to_string(), None),
5669 ]
5670 );
5671 assert_eq!(
5672 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5673 [
5674 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5675 ("\n".to_string(), None),
5676 ]
5677 );
5678 });
5679
5680 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5681 // changes since the last save.
5682 buffer.update(cx, |buffer, cx| {
5683 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5684 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5685 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5686 });
5687 let change_notification_2 = fake_server
5688 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5689 .await;
5690 assert!(
5691 change_notification_2.text_document.version
5692 > change_notification_1.text_document.version
5693 );
5694
5695    // Ensure diagnostics are handled even when they arrive out of order.
5696 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5697 lsp::PublishDiagnosticsParams {
5698 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5699 version: Some(change_notification_2.text_document.version),
5700 diagnostics: vec![
5701 lsp::Diagnostic {
5702 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5703 severity: Some(DiagnosticSeverity::ERROR),
5704 message: "undefined variable 'BB'".to_string(),
5705 source: Some("disk".to_string()),
5706 ..Default::default()
5707 },
5708 lsp::Diagnostic {
5709 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5710 severity: Some(DiagnosticSeverity::WARNING),
5711 message: "undefined variable 'A'".to_string(),
5712 source: Some("disk".to_string()),
5713 ..Default::default()
5714 },
5715 ],
5716 },
5717 );
5718
5719 buffer.next_notification(cx).await;
5720 buffer.read_with(cx, |buffer, _| {
5721 assert_eq!(
5722 buffer
5723 .snapshot()
5724 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5725 .collect::<Vec<_>>(),
5726 &[
5727 DiagnosticEntry {
5728 range: Point::new(2, 21)..Point::new(2, 22),
5729 diagnostic: Diagnostic {
5730 severity: DiagnosticSeverity::WARNING,
5731 message: "undefined variable 'A'".to_string(),
5732 is_disk_based: true,
5733 group_id: 1,
5734 is_primary: true,
5735 ..Default::default()
5736 }
5737 },
5738 DiagnosticEntry {
5739 range: Point::new(3, 9)..Point::new(3, 14),
5740 diagnostic: Diagnostic {
5741 severity: DiagnosticSeverity::ERROR,
5742 message: "undefined variable 'BB'".to_string(),
5743 is_disk_based: true,
5744 group_id: 0,
5745 is_primary: true,
5746 ..Default::default()
5747 },
5748 }
5749 ]
5750 );
5751 });
5752 }
5753
5754 #[gpui::test]
5755 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5756 cx.foreground().forbid_parking();
5757
5758 let text = concat!(
5759 "let one = ;\n", //
5760 "let two = \n",
5761 "let three = 3;\n",
5762 );
5763
5764 let fs = FakeFs::new(cx.background());
5765 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5766
5767 let project = Project::test(fs, cx);
5768 let worktree_id = project
5769 .update(cx, |project, cx| {
5770 project.find_or_create_local_worktree("/dir", true, cx)
5771 })
5772 .await
5773 .unwrap()
5774 .0
5775 .read_with(cx, |tree, _| tree.id());
5776
5777 let buffer = project
5778 .update(cx, |project, cx| {
5779 project.open_buffer((worktree_id, "a.rs"), cx)
5780 })
5781 .await
5782 .unwrap();
5783
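        // Manually attach two zero-width diagnostics to the buffer: one just before the
        // `;` on the first line, and one at the very end of the second line.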
5784 project.update(cx, |project, cx| {
5785 project
5786 .update_buffer_diagnostics(
5787 &buffer,
5788 vec![
5789 DiagnosticEntry {
5790 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5791 diagnostic: Diagnostic {
5792 severity: DiagnosticSeverity::ERROR,
5793 message: "syntax error 1".to_string(),
5794 ..Default::default()
5795 },
5796 },
5797 DiagnosticEntry {
5798 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5799 diagnostic: Diagnostic {
5800 severity: DiagnosticSeverity::ERROR,
5801 message: "syntax error 2".to_string(),
5802 ..Default::default()
5803 },
5804 },
5805 ],
5806 None,
5807 cx,
5808 )
5809 .unwrap();
5810 });
5811
5812 // An empty range is extended forward to include the following character.
5813 // At the end of a line, an empty range is extended backward to include
5814 // the preceding character.
5815 buffer.read_with(cx, |buffer, _| {
5816 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5817 assert_eq!(
5818 chunks
5819 .iter()
5820 .map(|(s, d)| (s.as_str(), *d))
5821 .collect::<Vec<_>>(),
5822 &[
5823 ("let one = ", None),
5824 (";", Some(DiagnosticSeverity::ERROR)),
5825 ("\nlet two =", None),
5826 (" ", Some(DiagnosticSeverity::ERROR)),
5827 ("\nlet three = 3;\n", None)
5828 ]
5829 );
5830 });
5831 }
5832
5833 #[gpui::test]
5834 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5835 cx.foreground().forbid_parking();
5836
5837 let mut language = Language::new(
5838 LanguageConfig {
5839 name: "Rust".into(),
5840 path_suffixes: vec!["rs".to_string()],
5841 ..Default::default()
5842 },
5843 Some(tree_sitter_rust::language()),
5844 );
5845 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5846
5847 let text = "
5848 fn a() {
5849 f1();
5850 }
5851 fn b() {
5852 f2();
5853 }
5854 fn c() {
5855 f3();
5856 }
5857 "
5858 .unindent();
5859
5860 let fs = FakeFs::new(cx.background());
5861 fs.insert_tree(
5862 "/dir",
5863 json!({
5864 "a.rs": text.clone(),
5865 }),
5866 )
5867 .await;
5868
5869 let project = Project::test(fs, cx);
5870 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5871
5872 let worktree_id = project
5873 .update(cx, |project, cx| {
5874 project.find_or_create_local_worktree("/dir", true, cx)
5875 })
5876 .await
5877 .unwrap()
5878 .0
5879 .read_with(cx, |tree, _| tree.id());
5880
5881 let buffer = project
5882 .update(cx, |project, cx| {
5883 project.open_buffer((worktree_id, "a.rs"), cx)
5884 })
5885 .await
5886 .unwrap();
5887
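        // Wait for the fake language server to start, and capture the document version it
        // was sent in the didOpen notification; the LSP edits below are expressed against
        // this older version of the buffer.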
5888 let mut fake_server = fake_servers.next().await.unwrap();
5889 let lsp_document_version = fake_server
5890 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5891 .await
5892 .text_document
5893 .version;
5894
5895 // Simulate editing the buffer after the language server computes some edits.
5896 buffer.update(cx, |buffer, cx| {
5897 buffer.edit(
5898 [Point::new(0, 0)..Point::new(0, 0)],
5899 "// above first function\n",
5900 cx,
5901 );
5902 buffer.edit(
5903 [Point::new(2, 0)..Point::new(2, 0)],
5904 " // inside first function\n",
5905 cx,
5906 );
5907 buffer.edit(
5908 [Point::new(6, 4)..Point::new(6, 4)],
5909 "// inside second function ",
5910 cx,
5911 );
5912
5913 assert_eq!(
5914 buffer.text(),
5915 "
5916 // above first function
5917 fn a() {
5918 // inside first function
5919 f1();
5920 }
5921 fn b() {
5922 // inside second function f2();
5923 }
5924 fn c() {
5925 f3();
5926 }
5927 "
5928 .unindent()
5929 );
5930 });
5931
5932 let edits = project
5933 .update(cx, |project, cx| {
5934 project.edits_from_lsp(
5935 &buffer,
5936 vec![
5937 // replace body of first function
5938 lsp::TextEdit {
5939 range: lsp::Range::new(
5940 lsp::Position::new(0, 0),
5941 lsp::Position::new(3, 0),
5942 ),
5943 new_text: "
5944 fn a() {
5945 f10();
5946 }
5947 "
5948 .unindent(),
5949 },
5950 // edit inside second function
5951 lsp::TextEdit {
5952 range: lsp::Range::new(
5953 lsp::Position::new(4, 6),
5954 lsp::Position::new(4, 6),
5955 ),
5956 new_text: "00".into(),
5957 },
5958 // edit inside third function via two distinct edits
5959 lsp::TextEdit {
5960 range: lsp::Range::new(
5961 lsp::Position::new(7, 5),
5962 lsp::Position::new(7, 5),
5963 ),
5964 new_text: "4000".into(),
5965 },
5966 lsp::TextEdit {
5967 range: lsp::Range::new(
5968 lsp::Position::new(7, 5),
5969 lsp::Position::new(7, 6),
5970 ),
5971 new_text: "".into(),
5972 },
5973 ],
5974 Some(lsp_document_version),
5975 cx,
5976 )
5977 })
5978 .await
5979 .unwrap();
5980
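        // Apply the returned edits. Because they were computed against `lsp_document_version`,
        // they should have been adjusted to account for the buffer edits made above.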
5981 buffer.update(cx, |buffer, cx| {
5982 for (range, new_text) in edits {
5983 buffer.edit([range], new_text, cx);
5984 }
5985 assert_eq!(
5986 buffer.text(),
5987 "
5988 // above first function
5989 fn a() {
5990 // inside first function
5991 f10();
5992 }
5993 fn b() {
5994 // inside second function f200();
5995 }
5996 fn c() {
5997 f4000();
5998 }
5999 "
6000 .unindent()
6001 );
6002 });
6003 }
6004
6005 #[gpui::test]
6006 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6007 cx.foreground().forbid_parking();
6008
6009 let text = "
6010 use a::b;
6011 use a::c;
6012
6013 fn f() {
6014 b();
6015 c();
6016 }
6017 "
6018 .unindent();
6019
6020 let fs = FakeFs::new(cx.background());
6021 fs.insert_tree(
6022 "/dir",
6023 json!({
6024 "a.rs": text.clone(),
6025 }),
6026 )
6027 .await;
6028
6029 let project = Project::test(fs, cx);
6030 let worktree_id = project
6031 .update(cx, |project, cx| {
6032 project.find_or_create_local_worktree("/dir", true, cx)
6033 })
6034 .await
6035 .unwrap()
6036 .0
6037 .read_with(cx, |tree, _| tree.id());
6038
6039 let buffer = project
6040 .update(cx, |project, cx| {
6041 project.open_buffer((worktree_id, "a.rs"), cx)
6042 })
6043 .await
6044 .unwrap();
6045
6046 // Simulate the language server sending us a small edit in the form of a very large diff.
6047 // Rust-analyzer does this when performing a merge-imports code action.
6048 let edits = project
6049 .update(cx, |project, cx| {
6050 project.edits_from_lsp(
6051 &buffer,
6052 [
6053 // Replace the first use statement without editing the semicolon.
6054 lsp::TextEdit {
6055 range: lsp::Range::new(
6056 lsp::Position::new(0, 4),
6057 lsp::Position::new(0, 8),
6058 ),
6059 new_text: "a::{b, c}".into(),
6060 },
6061 // Reinsert the remainder of the file between the semicolon and the final
6062 // newline of the file.
6063 lsp::TextEdit {
6064 range: lsp::Range::new(
6065 lsp::Position::new(0, 9),
6066 lsp::Position::new(0, 9),
6067 ),
6068 new_text: "\n\n".into(),
6069 },
6070 lsp::TextEdit {
6071 range: lsp::Range::new(
6072 lsp::Position::new(0, 9),
6073 lsp::Position::new(0, 9),
6074 ),
6075 new_text: "
6076 fn f() {
6077 b();
6078 c();
6079 }"
6080 .unindent(),
6081 },
6082 // Delete everything after the first newline of the file.
6083 lsp::TextEdit {
6084 range: lsp::Range::new(
6085 lsp::Position::new(1, 0),
6086 lsp::Position::new(7, 0),
6087 ),
6088 new_text: "".into(),
6089 },
6090 ],
6091 None,
6092 cx,
6093 )
6094 })
6095 .await
6096 .unwrap();
6097
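        // The large diff should be collapsed into a minimal set of edits that leave the
        // unchanged portions of the buffer untouched.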
6098 buffer.update(cx, |buffer, cx| {
6099 let edits = edits
6100 .into_iter()
6101 .map(|(range, text)| {
6102 (
6103 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6104 text,
6105 )
6106 })
6107 .collect::<Vec<_>>();
6108
6109 assert_eq!(
6110 edits,
6111 [
6112 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6113 (Point::new(1, 0)..Point::new(2, 0), "".into())
6114 ]
6115 );
6116
6117 for (range, new_text) in edits {
6118 buffer.edit([range], new_text, cx);
6119 }
6120 assert_eq!(
6121 buffer.text(),
6122 "
6123 use a::{b, c};
6124
6125 fn f() {
6126 b();
6127 c();
6128 }
6129 "
6130 .unindent()
6131 );
6132 });
6133 }
6134
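    // Returns the buffer's text in `range` as chunks, merging adjacent chunks that share
    // the same diagnostic severity.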
6135 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6136 buffer: &Buffer,
6137 range: Range<T>,
6138 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6139 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6140 for chunk in buffer.snapshot().chunks(range, true) {
6141 if chunks.last().map_or(false, |prev_chunk| {
6142 prev_chunk.1 == chunk.diagnostic_severity
6143 }) {
6144 chunks.last_mut().unwrap().0.push_str(chunk.text);
6145 } else {
6146 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6147 }
6148 }
6149 chunks
6150 }
6151
6152 #[gpui::test]
6153 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6154 let dir = temp_tree(json!({
6155 "root": {
6156 "dir1": {},
6157 "dir2": {
6158 "dir3": {}
6159 }
6160 }
6161 }));
6162
6163 let project = Project::test(Arc::new(RealFs), cx);
6164 let (tree, _) = project
6165 .update(cx, |project, cx| {
6166 project.find_or_create_local_worktree(&dir.path(), true, cx)
6167 })
6168 .await
6169 .unwrap();
6170
6171 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6172 .await;
6173
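        // Fuzzy path matching for "dir" should return no results, since the worktree
        // contains only directories and no files.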
6174 let cancel_flag = Default::default();
6175 let results = project
6176 .read_with(cx, |project, cx| {
6177 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6178 })
6179 .await;
6180
6181 assert!(results.is_empty());
6182 }
6183
6184 #[gpui::test]
6185 async fn test_definition(cx: &mut gpui::TestAppContext) {
6186 let mut language = Language::new(
6187 LanguageConfig {
6188 name: "Rust".into(),
6189 path_suffixes: vec!["rs".to_string()],
6190 ..Default::default()
6191 },
6192 Some(tree_sitter_rust::language()),
6193 );
6194 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6195
6196 let fs = FakeFs::new(cx.background());
6197 fs.insert_tree(
6198 "/dir",
6199 json!({
6200 "a.rs": "const fn a() { A }",
6201 "b.rs": "const y: i32 = crate::a()",
6202 }),
6203 )
6204 .await;
6205
6206 let project = Project::test(fs, cx);
6207 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6208
6209 let (tree, _) = project
6210 .update(cx, |project, cx| {
6211 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6212 })
6213 .await
6214 .unwrap();
6215 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6216 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6217 .await;
6218
6219 let buffer = project
6220 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6221 .await
6222 .unwrap();
6223
6224 let fake_server = fake_servers.next().await.unwrap();
6225 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6226 let params = params.text_document_position_params;
6227 assert_eq!(
6228 params.text_document.uri.to_file_path().unwrap(),
6229 Path::new("/dir/b.rs"),
6230 );
6231 assert_eq!(params.position, lsp::Position::new(0, 22));
6232
6233 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6234 lsp::Location::new(
6235 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6236 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6237 ),
6238 )))
6239 });
6240
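        // Request the definition of the symbol at offset 22 in b.rs; the fake server
        // responds with a location in a.rs, which is not yet part of the project.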
6241 let mut definitions = project
6242 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6243 .await
6244 .unwrap();
6245
6246 assert_eq!(definitions.len(), 1);
6247 let definition = definitions.pop().unwrap();
6248 cx.update(|cx| {
6249 let target_buffer = definition.buffer.read(cx);
6250 assert_eq!(
6251 target_buffer
6252 .file()
6253 .unwrap()
6254 .as_local()
6255 .unwrap()
6256 .abs_path(cx),
6257 Path::new("/dir/a.rs"),
6258 );
6259 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6260 assert_eq!(
6261 list_worktrees(&project, cx),
6262 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6263 );
6264
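            // Dropping the definition releases the buffer in the invisible worktree, so
            // that worktree is removed from the project.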
6265 drop(definition);
6266 });
6267 cx.read(|cx| {
6268 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6269 });
6270
6271 fn list_worktrees<'a>(
6272 project: &'a ModelHandle<Project>,
6273 cx: &'a AppContext,
6274 ) -> Vec<(&'a Path, bool)> {
6275 project
6276 .read(cx)
6277 .worktrees(cx)
6278 .map(|worktree| {
6279 let worktree = worktree.read(cx);
6280 (
6281 worktree.as_local().unwrap().abs_path().as_ref(),
6282 worktree.is_visible(),
6283 )
6284 })
6285 .collect::<Vec<_>>()
6286 }
6287 }
6288
6289 #[gpui::test(iterations = 10)]
6290 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6291 let mut language = Language::new(
6292 LanguageConfig {
6293 name: "TypeScript".into(),
6294 path_suffixes: vec!["ts".to_string()],
6295 ..Default::default()
6296 },
6297 None,
6298 );
6299 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6300
6301 let fs = FakeFs::new(cx.background());
6302 fs.insert_tree(
6303 "/dir",
6304 json!({
6305 "a.ts": "a",
6306 }),
6307 )
6308 .await;
6309
6310 let project = Project::test(fs, cx);
6311 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6312
6313 let (tree, _) = project
6314 .update(cx, |project, cx| {
6315 project.find_or_create_local_worktree("/dir", true, cx)
6316 })
6317 .await
6318 .unwrap();
6319 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6320 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6321 .await;
6322
6323 let buffer = project
6324 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6325 .await
6326 .unwrap();
6327
6328 let fake_server = fake_language_servers.next().await.unwrap();
6329
6330        // The language server returns code actions that contain commands rather than edits.
6331 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6332 fake_server
6333 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6334 Ok(Some(vec![
6335 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6336 title: "The code action".into(),
6337 command: Some(lsp::Command {
6338 title: "The command".into(),
6339 command: "_the/command".into(),
6340 arguments: Some(vec![json!("the-argument")]),
6341 }),
6342 ..Default::default()
6343 }),
6344 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6345 title: "two".into(),
6346 ..Default::default()
6347 }),
6348 ]))
6349 })
6350 .next()
6351 .await;
6352
6353 let action = actions.await.unwrap()[0].clone();
6354 let apply = project.update(cx, |project, cx| {
6355 project.apply_code_action(buffer.clone(), action, true, cx)
6356 });
6357
6358        // Resolving the code action does not populate its edits. In the absence
6359        // of edits, we must execute the given command.
6360 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6361 |action, _| async move { Ok(action) },
6362 );
6363
6364        // While executing the command, the language server sends the editor
6365        // a `workspace/applyEdit` request.
6366 fake_server
6367 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6368 let fake = fake_server.clone();
6369 move |params, _| {
6370 assert_eq!(params.command, "_the/command");
6371 let fake = fake.clone();
6372 async move {
6373 fake.server
6374 .request::<lsp::request::ApplyWorkspaceEdit>(
6375 lsp::ApplyWorkspaceEditParams {
6376 label: None,
6377 edit: lsp::WorkspaceEdit {
6378 changes: Some(
6379 [(
6380 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6381 vec![lsp::TextEdit {
6382 range: lsp::Range::new(
6383 lsp::Position::new(0, 0),
6384 lsp::Position::new(0, 0),
6385 ),
6386 new_text: "X".into(),
6387 }],
6388 )]
6389 .into_iter()
6390 .collect(),
6391 ),
6392 ..Default::default()
6393 },
6394 },
6395 )
6396 .await
6397 .unwrap();
6398 Ok(Some(json!(null)))
6399 }
6400 }
6401 })
6402 .next()
6403 .await;
6404
6405        // Applying the code action returns a project transaction containing the edits
6406        // sent by the language server in its `workspace/applyEdit` request.
6407 let transaction = apply.await.unwrap();
6408 assert!(transaction.0.contains_key(&buffer));
6409 buffer.update(cx, |buffer, cx| {
6410 assert_eq!(buffer.text(), "Xa");
6411 buffer.undo(cx);
6412 assert_eq!(buffer.text(), "a");
6413 });
6414 }
6415
6416 #[gpui::test]
6417 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6418 let fs = FakeFs::new(cx.background());
6419 fs.insert_tree(
6420 "/dir",
6421 json!({
6422 "file1": "the old contents",
6423 }),
6424 )
6425 .await;
6426
6427 let project = Project::test(fs.clone(), cx);
6428 let worktree_id = project
6429 .update(cx, |p, cx| {
6430 p.find_or_create_local_worktree("/dir", true, cx)
6431 })
6432 .await
6433 .unwrap()
6434 .0
6435 .read_with(cx, |tree, _| tree.id());
6436
6437 let buffer = project
6438 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6439 .await
6440 .unwrap();
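        // Replace the buffer's contents with a large body of text, save it, and verify
        // that the file on disk matches the buffer.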
6441 buffer
6442 .update(cx, |buffer, cx| {
6443 assert_eq!(buffer.text(), "the old contents");
6444 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6445 buffer.save(cx)
6446 })
6447 .await
6448 .unwrap();
6449
6450 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6451 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6452 }
6453
6454 #[gpui::test]
6455 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6456 let fs = FakeFs::new(cx.background());
6457 fs.insert_tree(
6458 "/dir",
6459 json!({
6460 "file1": "the old contents",
6461 }),
6462 )
6463 .await;
6464
6465 let project = Project::test(fs.clone(), cx);
6466 let worktree_id = project
6467 .update(cx, |p, cx| {
6468 p.find_or_create_local_worktree("/dir/file1", true, cx)
6469 })
6470 .await
6471 .unwrap()
6472 .0
6473 .read_with(cx, |tree, _| tree.id());
6474
6475 let buffer = project
6476 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6477 .await
6478 .unwrap();
6479 buffer
6480 .update(cx, |buffer, cx| {
6481 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6482 buffer.save(cx)
6483 })
6484 .await
6485 .unwrap();
6486
6487 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6488 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6489 }
6490
6491 #[gpui::test]
6492 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6493 let fs = FakeFs::new(cx.background());
6494 fs.insert_tree("/dir", json!({})).await;
6495
6496 let project = Project::test(fs.clone(), cx);
6497 let (worktree, _) = project
6498 .update(cx, |project, cx| {
6499 project.find_or_create_local_worktree("/dir", true, cx)
6500 })
6501 .await
6502 .unwrap();
6503 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6504
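        // Create an untitled buffer and give it some unsaved edits.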
6505 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6506 buffer.update(cx, |buffer, cx| {
6507 buffer.edit([0..0], "abc", cx);
6508 assert!(buffer.is_dirty());
6509 assert!(!buffer.has_conflict());
6510 });
6511 project
6512 .update(cx, |project, cx| {
6513 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6514 })
6515 .await
6516 .unwrap();
6517 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6518 buffer.read_with(cx, |buffer, cx| {
6519 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6520 assert!(!buffer.is_dirty());
6521 assert!(!buffer.has_conflict());
6522 });
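        // Opening the newly saved path yields the same buffer that was just saved.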
6523
6524 let opened_buffer = project
6525 .update(cx, |project, cx| {
6526 project.open_buffer((worktree_id, "file1"), cx)
6527 })
6528 .await
6529 .unwrap();
6530 assert_eq!(opened_buffer, buffer);
6531 }
6532
6533 #[gpui::test(retries = 5)]
6534 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6535 let dir = temp_tree(json!({
6536 "a": {
6537 "file1": "",
6538 "file2": "",
6539 "file3": "",
6540 },
6541 "b": {
6542 "c": {
6543 "file4": "",
6544 "file5": "",
6545 }
6546 }
6547 }));
6548
6549 let project = Project::test(Arc::new(RealFs), cx);
6550 let rpc = project.read_with(cx, |p, _| p.client.clone());
6551
6552 let (tree, _) = project
6553 .update(cx, |p, cx| {
6554 p.find_or_create_local_worktree(dir.path(), true, cx)
6555 })
6556 .await
6557 .unwrap();
6558 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6559
6560 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6561 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6562 async move { buffer.await.unwrap() }
6563 };
6564 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6565 tree.read_with(cx, |tree, _| {
6566 tree.entry_for_path(path)
6567 .expect(&format!("no entry for path {}", path))
6568 .id
6569 })
6570 };
6571
6572 let buffer2 = buffer_for_path("a/file2", cx).await;
6573 let buffer3 = buffer_for_path("a/file3", cx).await;
6574 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6575 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6576
6577 let file2_id = id_for_path("a/file2", &cx);
6578 let file3_id = id_for_path("a/file3", &cx);
6579 let file4_id = id_for_path("b/c/file4", &cx);
6580
6581 // Wait for the initial scan.
6582 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6583 .await;
6584
6585 // Create a remote copy of this worktree.
6586 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6587 let (remote, load_task) = cx.update(|cx| {
6588 Worktree::remote(
6589 1,
6590 1,
6591 initial_snapshot.to_proto(&Default::default(), true),
6592 rpc.clone(),
6593 cx,
6594 )
6595 });
6596 load_task.await;
6597
6598 cx.read(|cx| {
6599 assert!(!buffer2.read(cx).is_dirty());
6600 assert!(!buffer3.read(cx).is_dirty());
6601 assert!(!buffer4.read(cx).is_dirty());
6602 assert!(!buffer5.read(cx).is_dirty());
6603 });
6604
6605 // Rename and delete files and directories.
6606 tree.flush_fs_events(&cx).await;
6607 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6608 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6609 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6610 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6611 tree.flush_fs_events(&cx).await;
6612
6613 let expected_paths = vec![
6614 "a",
6615 "a/file1",
6616 "a/file2.new",
6617 "b",
6618 "d",
6619 "d/file3",
6620 "d/file4",
6621 ];
6622
6623 cx.read(|app| {
6624 assert_eq!(
6625 tree.read(app)
6626 .paths()
6627 .map(|p| p.to_str().unwrap())
6628 .collect::<Vec<_>>(),
6629 expected_paths
6630 );
6631
6632 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6633 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6634 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6635
6636 assert_eq!(
6637 buffer2.read(app).file().unwrap().path().as_ref(),
6638 Path::new("a/file2.new")
6639 );
6640 assert_eq!(
6641 buffer3.read(app).file().unwrap().path().as_ref(),
6642 Path::new("d/file3")
6643 );
6644 assert_eq!(
6645 buffer4.read(app).file().unwrap().path().as_ref(),
6646 Path::new("d/file4")
6647 );
6648 assert_eq!(
6649 buffer5.read(app).file().unwrap().path().as_ref(),
6650 Path::new("b/c/file5")
6651 );
6652
6653 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6654 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6655 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6656 assert!(buffer5.read(app).file().unwrap().is_deleted());
6657 });
6658
6659 // Update the remote worktree. Check that it becomes consistent with the
6660 // local worktree.
6661 remote.update(cx, |remote, cx| {
6662 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6663 &initial_snapshot,
6664 1,
6665 1,
6666 true,
6667 );
6668 remote
6669 .as_remote_mut()
6670 .unwrap()
6671 .snapshot
6672 .apply_remote_update(update_message)
6673 .unwrap();
6674
6675 assert_eq!(
6676 remote
6677 .paths()
6678 .map(|p| p.to_str().unwrap())
6679 .collect::<Vec<_>>(),
6680 expected_paths
6681 );
6682 });
6683 }
6684
6685 #[gpui::test]
6686 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6687 let fs = FakeFs::new(cx.background());
6688 fs.insert_tree(
6689 "/the-dir",
6690 json!({
6691 "a.txt": "a-contents",
6692 "b.txt": "b-contents",
6693 }),
6694 )
6695 .await;
6696
6697 let project = Project::test(fs.clone(), cx);
6698 let worktree_id = project
6699 .update(cx, |p, cx| {
6700 p.find_or_create_local_worktree("/the-dir", true, cx)
6701 })
6702 .await
6703 .unwrap()
6704 .0
6705 .read_with(cx, |tree, _| tree.id());
6706
6707 // Spawn multiple tasks to open paths, repeating some paths.
6708 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6709 (
6710 p.open_buffer((worktree_id, "a.txt"), cx),
6711 p.open_buffer((worktree_id, "b.txt"), cx),
6712 p.open_buffer((worktree_id, "a.txt"), cx),
6713 )
6714 });
6715
6716 let buffer_a_1 = buffer_a_1.await.unwrap();
6717 let buffer_a_2 = buffer_a_2.await.unwrap();
6718 let buffer_b = buffer_b.await.unwrap();
6719 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6720 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6721
6722 // There is only one buffer per path.
6723 let buffer_a_id = buffer_a_1.id();
6724 assert_eq!(buffer_a_2.id(), buffer_a_id);
6725
6726 // Open the same path again while it is still open.
6727 drop(buffer_a_1);
6728 let buffer_a_3 = project
6729 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6730 .await
6731 .unwrap();
6732
6733 // There's still only one buffer per path.
6734 assert_eq!(buffer_a_3.id(), buffer_a_id);
6735 }
6736
6737 #[gpui::test]
6738 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6739 use std::fs;
6740
6741 let dir = temp_tree(json!({
6742 "file1": "abc",
6743 "file2": "def",
6744 "file3": "ghi",
6745 }));
6746
6747 let project = Project::test(Arc::new(RealFs), cx);
6748 let (worktree, _) = project
6749 .update(cx, |p, cx| {
6750 p.find_or_create_local_worktree(dir.path(), true, cx)
6751 })
6752 .await
6753 .unwrap();
6754 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6755
6756 worktree.flush_fs_events(&cx).await;
6757 worktree
6758 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6759 .await;
6760
6761 let buffer1 = project
6762 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6763 .await
6764 .unwrap();
6765 let events = Rc::new(RefCell::new(Vec::new()));
6766
6767 // initially, the buffer isn't dirty.
6768 buffer1.update(cx, |buffer, cx| {
6769 cx.subscribe(&buffer1, {
6770 let events = events.clone();
6771 move |_, _, event, _| match event {
6772 BufferEvent::Operation(_) => {}
6773 _ => events.borrow_mut().push(event.clone()),
6774 }
6775 })
6776 .detach();
6777
6778 assert!(!buffer.is_dirty());
6779 assert!(events.borrow().is_empty());
6780
6781 buffer.edit(vec![1..2], "", cx);
6782 });
6783
6784 // after the first edit, the buffer is dirty, and emits a dirtied event.
6785 buffer1.update(cx, |buffer, cx| {
6786            assert_eq!(buffer.text(), "ac");
6787 assert!(buffer.is_dirty());
6788 assert_eq!(
6789 *events.borrow(),
6790 &[language::Event::Edited, language::Event::Dirtied]
6791 );
6792 events.borrow_mut().clear();
6793 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6794 });
6795
6796 // after saving, the buffer is not dirty, and emits a saved event.
6797 buffer1.update(cx, |buffer, cx| {
6798 assert!(!buffer.is_dirty());
6799 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6800 events.borrow_mut().clear();
6801
6802 buffer.edit(vec![1..1], "B", cx);
6803 buffer.edit(vec![2..2], "D", cx);
6804 });
6805
6806 // after editing again, the buffer is dirty, and emits another dirty event.
6807 buffer1.update(cx, |buffer, cx| {
6808            assert_eq!(buffer.text(), "aBDc");
6809 assert!(buffer.is_dirty());
6810 assert_eq!(
6811 *events.borrow(),
6812 &[
6813 language::Event::Edited,
6814 language::Event::Dirtied,
6815 language::Event::Edited,
6816 ],
6817 );
6818 events.borrow_mut().clear();
6819
6820            // TODO - currently, after restoring the buffer to its
6821            // previously-saved state, the buffer is still considered dirty.
6822 buffer.edit([1..3], "", cx);
6823            assert_eq!(buffer.text(), "ac");
6824 assert!(buffer.is_dirty());
6825 });
6826
6827 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6828
6829 // When a file is deleted, the buffer is considered dirty.
6830 let events = Rc::new(RefCell::new(Vec::new()));
6831 let buffer2 = project
6832 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6833 .await
6834 .unwrap();
6835 buffer2.update(cx, |_, cx| {
6836 cx.subscribe(&buffer2, {
6837 let events = events.clone();
6838 move |_, _, event, _| events.borrow_mut().push(event.clone())
6839 })
6840 .detach();
6841 });
6842
6843 fs::remove_file(dir.path().join("file2")).unwrap();
6844 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6845 assert_eq!(
6846 *events.borrow(),
6847 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6848 );
6849
6850        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6851 let events = Rc::new(RefCell::new(Vec::new()));
6852 let buffer3 = project
6853 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6854 .await
6855 .unwrap();
6856 buffer3.update(cx, |_, cx| {
6857 cx.subscribe(&buffer3, {
6858 let events = events.clone();
6859 move |_, _, event, _| events.borrow_mut().push(event.clone())
6860 })
6861 .detach();
6862 });
6863
6864 worktree.flush_fs_events(&cx).await;
6865 buffer3.update(cx, |buffer, cx| {
6866 buffer.edit(Some(0..0), "x", cx);
6867 });
6868 events.borrow_mut().clear();
6869 fs::remove_file(dir.path().join("file3")).unwrap();
6870 buffer3
6871 .condition(&cx, |_, _| !events.borrow().is_empty())
6872 .await;
6873 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6874 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6875 }
6876
6877 #[gpui::test]
6878 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6879 use std::fs;
6880
6881 let initial_contents = "aaa\nbbbbb\nc\n";
6882 let dir = temp_tree(json!({ "the-file": initial_contents }));
6883
6884 let project = Project::test(Arc::new(RealFs), cx);
6885 let (worktree, _) = project
6886 .update(cx, |p, cx| {
6887 p.find_or_create_local_worktree(dir.path(), true, cx)
6888 })
6889 .await
6890 .unwrap();
6891 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6892
6893 worktree
6894 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6895 .await;
6896
6897 let abs_path = dir.path().join("the-file");
6898 let buffer = project
6899 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6900 .await
6901 .unwrap();
6902
6903 // TODO
6904 // Add a cursor on each row.
6905 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6906 // assert!(!buffer.is_dirty());
6907 // buffer.add_selection_set(
6908 // &(0..3)
6909 // .map(|row| Selection {
6910 // id: row as usize,
6911 // start: Point::new(row, 1),
6912 // end: Point::new(row, 1),
6913 // reversed: false,
6914 // goal: SelectionGoal::None,
6915 // })
6916 // .collect::<Vec<_>>(),
6917 // cx,
6918 // )
6919 // });
6920
6921 // Change the file on disk, adding two new lines of text, and removing
6922 // one line.
6923 buffer.read_with(cx, |buffer, _| {
6924 assert!(!buffer.is_dirty());
6925 assert!(!buffer.has_conflict());
6926 });
6927 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6928 fs::write(&abs_path, new_contents).unwrap();
6929
6930 // Because the buffer was not modified, it is reloaded from disk. Its
6931 // contents are edited according to the diff between the old and new
6932 // file contents.
6933 buffer
6934 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6935 .await;
6936
6937 buffer.update(cx, |buffer, _| {
6938 assert_eq!(buffer.text(), new_contents);
6939 assert!(!buffer.is_dirty());
6940 assert!(!buffer.has_conflict());
6941
6942 // TODO
6943 // let cursor_positions = buffer
6944 // .selection_set(selection_set_id)
6945 // .unwrap()
6946 // .selections::<Point>(&*buffer)
6947 // .map(|selection| {
6948 // assert_eq!(selection.start, selection.end);
6949 // selection.start
6950 // })
6951 // .collect::<Vec<_>>();
6952 // assert_eq!(
6953 // cursor_positions,
6954 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6955 // );
6956 });
6957
6958 // Modify the buffer
6959 buffer.update(cx, |buffer, cx| {
6960 buffer.edit(vec![0..0], " ", cx);
6961 assert!(buffer.is_dirty());
6962 assert!(!buffer.has_conflict());
6963 });
6964
6965 // Change the file on disk again, adding blank lines to the beginning.
6966 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6967
6968 // Because the buffer is modified, it doesn't reload from disk, but is
6969 // marked as having a conflict.
6970 buffer
6971 .condition(&cx, |buffer, _| buffer.has_conflict())
6972 .await;
6973 }
6974
6975 #[gpui::test]
6976 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6977 cx.foreground().forbid_parking();
6978
6979 let fs = FakeFs::new(cx.background());
6980 fs.insert_tree(
6981 "/the-dir",
6982 json!({
6983 "a.rs": "
6984 fn foo(mut v: Vec<usize>) {
6985 for x in &v {
6986 v.push(1);
6987 }
6988 }
6989 "
6990 .unindent(),
6991 }),
6992 )
6993 .await;
6994
6995 let project = Project::test(fs.clone(), cx);
6996 let (worktree, _) = project
6997 .update(cx, |p, cx| {
6998 p.find_or_create_local_worktree("/the-dir", true, cx)
6999 })
7000 .await
7001 .unwrap();
7002 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7003
7004 let buffer = project
7005 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7006 .await
7007 .unwrap();
7008
7009 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7010 let message = lsp::PublishDiagnosticsParams {
7011 uri: buffer_uri.clone(),
7012 diagnostics: vec![
7013 lsp::Diagnostic {
7014 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7015 severity: Some(DiagnosticSeverity::WARNING),
7016 message: "error 1".to_string(),
7017 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7018 location: lsp::Location {
7019 uri: buffer_uri.clone(),
7020 range: lsp::Range::new(
7021 lsp::Position::new(1, 8),
7022 lsp::Position::new(1, 9),
7023 ),
7024 },
7025 message: "error 1 hint 1".to_string(),
7026 }]),
7027 ..Default::default()
7028 },
7029 lsp::Diagnostic {
7030 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7031 severity: Some(DiagnosticSeverity::HINT),
7032 message: "error 1 hint 1".to_string(),
7033 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7034 location: lsp::Location {
7035 uri: buffer_uri.clone(),
7036 range: lsp::Range::new(
7037 lsp::Position::new(1, 8),
7038 lsp::Position::new(1, 9),
7039 ),
7040 },
7041 message: "original diagnostic".to_string(),
7042 }]),
7043 ..Default::default()
7044 },
7045 lsp::Diagnostic {
7046 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7047 severity: Some(DiagnosticSeverity::ERROR),
7048 message: "error 2".to_string(),
7049 related_information: Some(vec![
7050 lsp::DiagnosticRelatedInformation {
7051 location: lsp::Location {
7052 uri: buffer_uri.clone(),
7053 range: lsp::Range::new(
7054 lsp::Position::new(1, 13),
7055 lsp::Position::new(1, 15),
7056 ),
7057 },
7058 message: "error 2 hint 1".to_string(),
7059 },
7060 lsp::DiagnosticRelatedInformation {
7061 location: lsp::Location {
7062 uri: buffer_uri.clone(),
7063 range: lsp::Range::new(
7064 lsp::Position::new(1, 13),
7065 lsp::Position::new(1, 15),
7066 ),
7067 },
7068 message: "error 2 hint 2".to_string(),
7069 },
7070 ]),
7071 ..Default::default()
7072 },
7073 lsp::Diagnostic {
7074 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7075 severity: Some(DiagnosticSeverity::HINT),
7076 message: "error 2 hint 1".to_string(),
7077 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7078 location: lsp::Location {
7079 uri: buffer_uri.clone(),
7080 range: lsp::Range::new(
7081 lsp::Position::new(2, 8),
7082 lsp::Position::new(2, 17),
7083 ),
7084 },
7085 message: "original diagnostic".to_string(),
7086 }]),
7087 ..Default::default()
7088 },
7089 lsp::Diagnostic {
7090 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7091 severity: Some(DiagnosticSeverity::HINT),
7092 message: "error 2 hint 2".to_string(),
7093 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7094 location: lsp::Location {
7095 uri: buffer_uri.clone(),
7096 range: lsp::Range::new(
7097 lsp::Position::new(2, 8),
7098 lsp::Position::new(2, 17),
7099 ),
7100 },
7101 message: "original diagnostic".to_string(),
7102 }]),
7103 ..Default::default()
7104 },
7105 ],
7106 version: None,
7107 };
7108
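        // Feed the diagnostics to the project. Supplementary hints that point back to an
        // "original diagnostic" via related information are grouped with their primary
        // diagnostic.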
7109 project
7110 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7111 .unwrap();
7112 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7113
7114 assert_eq!(
7115 buffer
7116 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7117 .collect::<Vec<_>>(),
7118 &[
7119 DiagnosticEntry {
7120 range: Point::new(1, 8)..Point::new(1, 9),
7121 diagnostic: Diagnostic {
7122 severity: DiagnosticSeverity::WARNING,
7123 message: "error 1".to_string(),
7124 group_id: 0,
7125 is_primary: true,
7126 ..Default::default()
7127 }
7128 },
7129 DiagnosticEntry {
7130 range: Point::new(1, 8)..Point::new(1, 9),
7131 diagnostic: Diagnostic {
7132 severity: DiagnosticSeverity::HINT,
7133 message: "error 1 hint 1".to_string(),
7134 group_id: 0,
7135 is_primary: false,
7136 ..Default::default()
7137 }
7138 },
7139 DiagnosticEntry {
7140 range: Point::new(1, 13)..Point::new(1, 15),
7141 diagnostic: Diagnostic {
7142 severity: DiagnosticSeverity::HINT,
7143 message: "error 2 hint 1".to_string(),
7144 group_id: 1,
7145 is_primary: false,
7146 ..Default::default()
7147 }
7148 },
7149 DiagnosticEntry {
7150 range: Point::new(1, 13)..Point::new(1, 15),
7151 diagnostic: Diagnostic {
7152 severity: DiagnosticSeverity::HINT,
7153 message: "error 2 hint 2".to_string(),
7154 group_id: 1,
7155 is_primary: false,
7156 ..Default::default()
7157 }
7158 },
7159 DiagnosticEntry {
7160 range: Point::new(2, 8)..Point::new(2, 17),
7161 diagnostic: Diagnostic {
7162 severity: DiagnosticSeverity::ERROR,
7163 message: "error 2".to_string(),
7164 group_id: 1,
7165 is_primary: true,
7166 ..Default::default()
7167 }
7168 }
7169 ]
7170 );
7171
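        // Each group contains its primary diagnostic along with its supporting hints.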
7172 assert_eq!(
7173 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7174 &[
7175 DiagnosticEntry {
7176 range: Point::new(1, 8)..Point::new(1, 9),
7177 diagnostic: Diagnostic {
7178 severity: DiagnosticSeverity::WARNING,
7179 message: "error 1".to_string(),
7180 group_id: 0,
7181 is_primary: true,
7182 ..Default::default()
7183 }
7184 },
7185 DiagnosticEntry {
7186 range: Point::new(1, 8)..Point::new(1, 9),
7187 diagnostic: Diagnostic {
7188 severity: DiagnosticSeverity::HINT,
7189 message: "error 1 hint 1".to_string(),
7190 group_id: 0,
7191 is_primary: false,
7192 ..Default::default()
7193 }
7194 },
7195 ]
7196 );
7197 assert_eq!(
7198 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7199 &[
7200 DiagnosticEntry {
7201 range: Point::new(1, 13)..Point::new(1, 15),
7202 diagnostic: Diagnostic {
7203 severity: DiagnosticSeverity::HINT,
7204 message: "error 2 hint 1".to_string(),
7205 group_id: 1,
7206 is_primary: false,
7207 ..Default::default()
7208 }
7209 },
7210 DiagnosticEntry {
7211 range: Point::new(1, 13)..Point::new(1, 15),
7212 diagnostic: Diagnostic {
7213 severity: DiagnosticSeverity::HINT,
7214 message: "error 2 hint 2".to_string(),
7215 group_id: 1,
7216 is_primary: false,
7217 ..Default::default()
7218 }
7219 },
7220 DiagnosticEntry {
7221 range: Point::new(2, 8)..Point::new(2, 17),
7222 diagnostic: Diagnostic {
7223 severity: DiagnosticSeverity::ERROR,
7224 message: "error 2".to_string(),
7225 group_id: 1,
7226 is_primary: true,
7227 ..Default::default()
7228 }
7229 }
7230 ]
7231 );
7232 }
7233
7234 #[gpui::test]
7235 async fn test_rename(cx: &mut gpui::TestAppContext) {
7236 cx.foreground().forbid_parking();
7237
7238 let mut language = Language::new(
7239 LanguageConfig {
7240 name: "Rust".into(),
7241 path_suffixes: vec!["rs".to_string()],
7242 ..Default::default()
7243 },
7244 Some(tree_sitter_rust::language()),
7245 );
7246 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7247
7248 let fs = FakeFs::new(cx.background());
7249 fs.insert_tree(
7250 "/dir",
7251 json!({
7252 "one.rs": "const ONE: usize = 1;",
7253 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7254 }),
7255 )
7256 .await;
7257
7258 let project = Project::test(fs.clone(), cx);
7259 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7260
7261 let (tree, _) = project
7262 .update(cx, |project, cx| {
7263 project.find_or_create_local_worktree("/dir", true, cx)
7264 })
7265 .await
7266 .unwrap();
7267 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7268 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7269 .await;
7270
7271 let buffer = project
7272 .update(cx, |project, cx| {
7273 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7274 })
7275 .await
7276 .unwrap();
7277
7278 let fake_server = fake_servers.next().await.unwrap();
7279
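        // Prepare a rename at offset 7, which falls inside the identifier `ONE`; the fake
        // server reports the identifier's full range.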
7280 let response = project.update(cx, |project, cx| {
7281 project.prepare_rename(buffer.clone(), 7, cx)
7282 });
7283 fake_server
7284 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7285 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7286 assert_eq!(params.position, lsp::Position::new(0, 7));
7287 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7288 lsp::Position::new(0, 6),
7289 lsp::Position::new(0, 9),
7290 ))))
7291 })
7292 .next()
7293 .await
7294 .unwrap();
7295 let range = response.await.unwrap().unwrap();
7296 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7297 assert_eq!(range, 6..9);
7298
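        // Perform the rename. The fake server responds with a workspace edit touching both
        // one.rs and two.rs, producing a project transaction that spans both buffers.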
7299 let response = project.update(cx, |project, cx| {
7300 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7301 });
7302 fake_server
7303 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7304 assert_eq!(
7305 params.text_document_position.text_document.uri.as_str(),
7306 "file:///dir/one.rs"
7307 );
7308 assert_eq!(
7309 params.text_document_position.position,
7310 lsp::Position::new(0, 7)
7311 );
7312 assert_eq!(params.new_name, "THREE");
7313 Ok(Some(lsp::WorkspaceEdit {
7314 changes: Some(
7315 [
7316 (
7317 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7318 vec![lsp::TextEdit::new(
7319 lsp::Range::new(
7320 lsp::Position::new(0, 6),
7321 lsp::Position::new(0, 9),
7322 ),
7323 "THREE".to_string(),
7324 )],
7325 ),
7326 (
7327 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7328 vec![
7329 lsp::TextEdit::new(
7330 lsp::Range::new(
7331 lsp::Position::new(0, 24),
7332 lsp::Position::new(0, 27),
7333 ),
7334 "THREE".to_string(),
7335 ),
7336 lsp::TextEdit::new(
7337 lsp::Range::new(
7338 lsp::Position::new(0, 35),
7339 lsp::Position::new(0, 38),
7340 ),
7341 "THREE".to_string(),
7342 ),
7343 ],
7344 ),
7345 ]
7346 .into_iter()
7347 .collect(),
7348 ),
7349 ..Default::default()
7350 }))
7351 })
7352 .next()
7353 .await
7354 .unwrap();
7355 let mut transaction = response.await.unwrap().0;
7356 assert_eq!(transaction.len(), 2);
7357 assert_eq!(
7358 transaction
7359 .remove_entry(&buffer)
7360 .unwrap()
7361 .0
7362 .read_with(cx, |buffer, _| buffer.text()),
7363 "const THREE: usize = 1;"
7364 );
7365 assert_eq!(
7366 transaction
7367 .into_keys()
7368 .next()
7369 .unwrap()
7370 .read_with(cx, |buffer, _| buffer.text()),
7371 "const TWO: usize = one::THREE + one::THREE;"
7372 );
7373 }
7374
7375 #[gpui::test]
7376 async fn test_search(cx: &mut gpui::TestAppContext) {
7377 let fs = FakeFs::new(cx.background());
7378 fs.insert_tree(
7379 "/dir",
7380 json!({
7381 "one.rs": "const ONE: usize = 1;",
7382 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7383 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7384 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7385 }),
7386 )
7387 .await;
7388 let project = Project::test(fs.clone(), cx);
7389 let (tree, _) = project
7390 .update(cx, |project, cx| {
7391 project.find_or_create_local_worktree("/dir", true, cx)
7392 })
7393 .await
7394 .unwrap();
7395 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7396 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7397 .await;
7398
7399 assert_eq!(
7400 search(&project, SearchQuery::text("TWO", false, true), cx)
7401 .await
7402 .unwrap(),
7403 HashMap::from_iter([
7404 ("two.rs".to_string(), vec![6..9]),
7405 ("three.rs".to_string(), vec![37..40])
7406 ])
7407 );
7408
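        // Edit four.rs in memory so that it now contains matches for the query; the search
        // should reflect the buffer's unsaved contents rather than the file on disk.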
7409 let buffer_4 = project
7410 .update(cx, |project, cx| {
7411 project.open_buffer((worktree_id, "four.rs"), cx)
7412 })
7413 .await
7414 .unwrap();
7415 buffer_4.update(cx, |buffer, cx| {
7416 buffer.edit([20..28, 31..43], "two::TWO", cx);
7417 });
7418
7419 assert_eq!(
7420 search(&project, SearchQuery::text("TWO", false, true), cx)
7421 .await
7422 .unwrap(),
7423 HashMap::from_iter([
7424 ("two.rs".to_string(), vec![6..9]),
7425 ("three.rs".to_string(), vec![37..40]),
7426 ("four.rs".to_string(), vec![25..28, 36..39])
7427 ])
7428 );
7429
7430 async fn search(
7431 project: &ModelHandle<Project>,
7432 query: SearchQuery,
7433 cx: &mut gpui::TestAppContext,
7434 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7435 let results = project
7436 .update(cx, |project, cx| project.search(query, cx))
7437 .await?;
7438
7439 Ok(results
7440 .into_iter()
7441 .map(|(buffer, ranges)| {
7442 buffer.read_with(cx, |buffer, _| {
7443 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7444 let ranges = ranges
7445 .into_iter()
7446 .map(|range| range.to_offset(buffer))
7447 .collect::<Vec<_>>();
7448 (path, ranges)
7449 })
7450 })
7451 .collect())
7452 }
7453 }
7454}