1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use settings::Settings;
32use sha2::{Digest, Sha256};
33use similar::{ChangeTag, TextDiff};
34use std::{
35 cell::RefCell,
36 cmp::{self, Ordering},
37 convert::TryInto,
38 hash::Hash,
39 mem,
40 ops::Range,
41 path::{Component, Path, PathBuf},
42 rc::Rc,
43 sync::{
44 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
45 Arc,
46 },
47 time::Instant,
48};
49use util::{post_inc, ResultExt, TryFutureExt as _};
50
51pub use fs::*;
52pub use worktree::*;
53
54pub trait Item: Entity {
55 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
56}
57
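/// Root state for a single project: its worktrees, open buffers, collaborators,
/// and any language servers started on its behalf. A `Project` is either local
/// (backed by the file system) or remote (joined over RPC), as tracked by
/// `client_state`.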
58pub struct Project {
59 worktrees: Vec<WorktreeHandle>,
60 active_entry: Option<ProjectEntryId>,
61 languages: Arc<LanguageRegistry>,
62 language_servers:
63 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
64 started_language_servers:
65 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
66 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
67 language_server_settings: Arc<Mutex<serde_json::Value>>,
68 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
69 next_language_server_id: usize,
70 client: Arc<client::Client>,
71 next_entry_id: Arc<AtomicUsize>,
72 user_store: ModelHandle<UserStore>,
73 fs: Arc<dyn Fs>,
74 client_state: ProjectClientState,
75 collaborators: HashMap<PeerId, Collaborator>,
76 subscriptions: Vec<client::Subscription>,
77 language_servers_with_diagnostics_running: isize,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136pub struct LanguageServerStatus {
137 pub name: String,
138 pub pending_work: BTreeMap<String, LanguageServerProgress>,
139 pending_diagnostic_updates: isize,
140}
141
142#[derive(Clone, Debug)]
143pub struct LanguageServerProgress {
144 pub message: Option<String>,
145 pub percentage: Option<usize>,
146 pub last_update_at: Instant,
147}
148
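/// A path to a file or directory, relative to the root of a particular worktree.
///
/// A minimal sketch of constructing one by hand (the `worktree_id` would normally
/// come from an existing `Worktree`):
///
/// ```ignore
/// let project_path = ProjectPath {
///     worktree_id,
///     path: Path::new("src/main.rs").into(),
/// };
/// ```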
149#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
150pub struct ProjectPath {
151 pub worktree_id: WorktreeId,
152 pub path: Arc<Path>,
153}
154
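/// Counts of diagnostics by severity for a single path. Only primary diagnostics
/// are counted (see `DiagnosticSummary::new` below); supporting entries within a
/// diagnostic group are skipped.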
155#[derive(Clone, Debug, Default, PartialEq)]
156pub struct DiagnosticSummary {
157 pub error_count: usize,
158 pub warning_count: usize,
159 pub info_count: usize,
160 pub hint_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 info_count: 0,
197 hint_count: 0,
198 };
199
200 for entry in diagnostics {
201 if entry.diagnostic.is_primary {
202 match entry.diagnostic.severity {
203 DiagnosticSeverity::ERROR => this.error_count += 1,
204 DiagnosticSeverity::WARNING => this.warning_count += 1,
205 DiagnosticSeverity::INFORMATION => this.info_count += 1,
206 DiagnosticSeverity::HINT => this.hint_count += 1,
207 _ => {}
208 }
209 }
210 }
211
212 this
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 info_count: self.info_count as u32,
221 hint_count: self.hint_count as u32,
222 }
223 }
224}
225
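/// A project-wide identifier for an entry in a worktree, allocated from a shared
/// atomic counter so that ids stay unique across worktrees.
///
/// A small sketch of the round-trip through the wire format:
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let id = ProjectEntryId::new(&counter);
/// assert_eq!(ProjectEntryId::from_proto(id.to_proto()), id);
/// ```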
226#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
227pub struct ProjectEntryId(usize);
228
229impl ProjectEntryId {
230 pub fn new(counter: &AtomicUsize) -> Self {
231 Self(counter.fetch_add(1, SeqCst))
232 }
233
234 pub fn from_proto(id: u64) -> Self {
235 Self(id as usize)
236 }
237
238 pub fn to_proto(&self) -> u64 {
239 self.0 as u64
240 }
241
242 pub fn to_usize(&self) -> usize {
243 self.0
244 }
245}
246
247impl Project {
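    /// Registers all of the RPC message and request handlers that a `Project`
    /// responds to. Presumably called once during application startup, before
    /// any projects are created.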
248 pub fn init(client: &Arc<Client>) {
249 client.add_model_message_handler(Self::handle_add_collaborator);
250 client.add_model_message_handler(Self::handle_buffer_reloaded);
251 client.add_model_message_handler(Self::handle_buffer_saved);
252 client.add_model_message_handler(Self::handle_start_language_server);
253 client.add_model_message_handler(Self::handle_update_language_server);
254 client.add_model_message_handler(Self::handle_remove_collaborator);
255 client.add_model_message_handler(Self::handle_register_worktree);
256 client.add_model_message_handler(Self::handle_unregister_worktree);
257 client.add_model_message_handler(Self::handle_unshare_project);
258 client.add_model_message_handler(Self::handle_update_buffer_file);
259 client.add_model_message_handler(Self::handle_update_buffer);
260 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
261 client.add_model_message_handler(Self::handle_update_worktree);
262 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
263 client.add_model_request_handler(Self::handle_apply_code_action);
264 client.add_model_request_handler(Self::handle_reload_buffers);
265 client.add_model_request_handler(Self::handle_format_buffers);
266 client.add_model_request_handler(Self::handle_get_code_actions);
267 client.add_model_request_handler(Self::handle_get_completions);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
272 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
273 client.add_model_request_handler(Self::handle_search_project);
274 client.add_model_request_handler(Self::handle_get_project_symbols);
275 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
276 client.add_model_request_handler(Self::handle_open_buffer_by_id);
277 client.add_model_request_handler(Self::handle_open_buffer_by_path);
278 client.add_model_request_handler(Self::handle_save_buffer);
279 }
280
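    /// Creates a project backed by the local file system.
    ///
    /// A minimal sketch, assuming the caller already has a connected `Client`, a
    /// `UserStore`, a `LanguageRegistry`, and an `Fs` implementation (see
    /// `Project::test` below for how the test harness wires these up):
    ///
    /// ```ignore
    /// let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
    /// ```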
281 pub fn local(
282 client: Arc<Client>,
283 user_store: ModelHandle<UserStore>,
284 languages: Arc<LanguageRegistry>,
285 fs: Arc<dyn Fs>,
286 cx: &mut MutableAppContext,
287 ) -> ModelHandle<Self> {
288 cx.add_model(|cx: &mut ModelContext<Self>| {
289 let (remote_id_tx, remote_id_rx) = watch::channel();
290 let _maintain_remote_id_task = cx.spawn_weak({
291 let rpc = client.clone();
292 move |this, mut cx| {
293 async move {
294 let mut status = rpc.status();
295 while let Some(status) = status.next().await {
296 if let Some(this) = this.upgrade(&cx) {
297 if status.is_connected() {
298 this.update(&mut cx, |this, cx| this.register(cx)).await?;
299 } else {
300 this.update(&mut cx, |this, cx| this.unregister(cx));
301 }
302 }
303 }
304 Ok(())
305 }
306 .log_err()
307 }
308 });
309
310 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
311 Self {
312 worktrees: Default::default(),
313 collaborators: Default::default(),
314 opened_buffers: Default::default(),
315 shared_buffers: Default::default(),
316 loading_buffers: Default::default(),
317 loading_local_worktrees: Default::default(),
318 buffer_snapshots: Default::default(),
319 client_state: ProjectClientState::Local {
320 is_shared: false,
321 remote_id_tx,
322 remote_id_rx,
323 _maintain_remote_id_task,
324 },
325 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
326 subscriptions: Vec::new(),
327 active_entry: None,
328 languages,
329 client,
330 user_store,
331 fs,
332 next_entry_id: Default::default(),
333 language_servers_with_diagnostics_running: 0,
334 language_servers: Default::default(),
335 started_language_servers: Default::default(),
336 language_server_statuses: Default::default(),
337 last_workspace_edits_by_language_server: Default::default(),
338 language_server_settings: Default::default(),
339 next_language_server_id: 0,
340 nonce: StdRng::from_entropy().gen(),
341 }
342 })
343 }
344
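    /// Joins an existing project that is being shared by another peer, identified
    /// by its remote id.
    ///
    /// A sketch of the call, assuming an `AsyncAppContext` and a remote id obtained
    /// out of band (for example from a collaboration invite):
    ///
    /// ```ignore
    /// let project =
    ///     Project::remote(remote_id, client, user_store, languages, fs, &mut cx).await?;
    /// ```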
345 pub async fn remote(
346 remote_id: u64,
347 client: Arc<Client>,
348 user_store: ModelHandle<UserStore>,
349 languages: Arc<LanguageRegistry>,
350 fs: Arc<dyn Fs>,
351 cx: &mut AsyncAppContext,
352 ) -> Result<ModelHandle<Self>> {
353 client.authenticate_and_connect(true, &cx).await?;
354
355 let response = client
356 .request(proto::JoinProject {
357 project_id: remote_id,
358 })
359 .await?;
360
361 let replica_id = response.replica_id as ReplicaId;
362
363 let mut worktrees = Vec::new();
364 for worktree in response.worktrees {
365 let (worktree, load_task) = cx
366 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
367 worktrees.push(worktree);
368 load_task.detach();
369 }
370
371 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
372 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
373 let mut this = Self {
374 worktrees: Vec::new(),
375 loading_buffers: Default::default(),
376 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
377 shared_buffers: Default::default(),
378 loading_local_worktrees: Default::default(),
379 active_entry: None,
380 collaborators: Default::default(),
381 languages,
382 user_store: user_store.clone(),
383 fs,
384 next_entry_id: Default::default(),
385 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
386 client: client.clone(),
387 client_state: ProjectClientState::Remote {
388 sharing_has_stopped: false,
389 remote_id,
390 replica_id,
391 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
392 async move {
393 let mut status = client.status();
394 let is_connected =
395 status.next().await.map_or(false, |s| s.is_connected());
                        // Even if we're initially connected, any later change in status means we were momentarily disconnected.
397 if !is_connected || status.next().await.is_some() {
398 if let Some(this) = this.upgrade(&cx) {
399 this.update(&mut cx, |this, cx| this.project_unshared(cx))
400 }
401 }
402 Ok(())
403 }
404 .log_err()
405 }),
406 },
407 language_servers_with_diagnostics_running: 0,
408 language_servers: Default::default(),
409 started_language_servers: Default::default(),
410 language_server_settings: Default::default(),
411 language_server_statuses: response
412 .language_servers
413 .into_iter()
414 .map(|server| {
415 (
416 server.id as usize,
417 LanguageServerStatus {
418 name: server.name,
419 pending_work: Default::default(),
420 pending_diagnostic_updates: 0,
421 },
422 )
423 })
424 .collect(),
425 last_workspace_edits_by_language_server: Default::default(),
426 next_language_server_id: 0,
427 opened_buffers: Default::default(),
428 buffer_snapshots: Default::default(),
429 nonce: StdRng::from_entropy().gen(),
430 };
431 for worktree in worktrees {
432 this.add_worktree(&worktree, cx);
433 }
434 this
435 });
436
437 let user_ids = response
438 .collaborators
439 .iter()
440 .map(|peer| peer.user_id)
441 .collect();
442 user_store
443 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
444 .await?;
445 let mut collaborators = HashMap::default();
446 for message in response.collaborators {
447 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
448 collaborators.insert(collaborator.peer_id, collaborator);
449 }
450
451 this.update(cx, |this, _| {
452 this.collaborators = collaborators;
453 });
454
455 Ok(this)
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
465 }
466
467 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
468 self.opened_buffers
469 .get(&remote_id)
470 .and_then(|buffer| buffer.upgrade(cx))
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn languages(&self) -> &Arc<LanguageRegistry> {
475 &self.languages
476 }
477
478 #[cfg(any(test, feature = "test-support"))]
479 pub fn check_invariants(&self, cx: &AppContext) {
480 if self.is_local() {
481 let mut worktree_root_paths = HashMap::default();
482 for worktree in self.worktrees(cx) {
483 let worktree = worktree.read(cx);
484 let abs_path = worktree.as_local().unwrap().abs_path().clone();
485 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
486 assert_eq!(
487 prev_worktree_id,
488 None,
489 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
490 abs_path,
491 worktree.id(),
492 prev_worktree_id
493 )
494 }
495 } else {
496 let replica_id = self.replica_id();
497 for buffer in self.opened_buffers.values() {
498 if let Some(buffer) = buffer.upgrade(cx) {
499 let buffer = buffer.read(cx);
500 assert_eq!(
501 buffer.deferred_ops_len(),
502 0,
503 "replica {}, buffer {} has deferred operations",
504 replica_id,
505 buffer.remote_id()
506 );
507 }
508 }
509 }
510 }
511
512 #[cfg(any(test, feature = "test-support"))]
513 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
514 let path = path.into();
515 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
516 self.opened_buffers.iter().any(|(_, buffer)| {
517 if let Some(buffer) = buffer.upgrade(cx) {
518 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
519 if file.worktree == worktree && file.path() == &path.path {
520 return true;
521 }
522 }
523 }
524 false
525 })
526 } else {
527 false
528 }
529 }
530
531 pub fn fs(&self) -> &Arc<dyn Fs> {
532 &self.fs
533 }
534
535 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
536 self.unshare(cx);
537 for worktree in &self.worktrees {
538 if let Some(worktree) = worktree.upgrade(cx) {
539 worktree.update(cx, |worktree, _| {
540 worktree.as_local_mut().unwrap().unregister();
541 });
542 }
543 }
544
545 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
546 *remote_id_tx.borrow_mut() = None;
547 }
548
549 self.subscriptions.clear();
550 }
551
552 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
553 self.unregister(cx);
554
555 let response = self.client.request(proto::RegisterProject {});
556 cx.spawn(|this, mut cx| async move {
557 let remote_id = response.await?.project_id;
558
559 let mut registrations = Vec::new();
560 this.update(&mut cx, |this, cx| {
561 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
562 *remote_id_tx.borrow_mut() = Some(remote_id);
563 }
564
565 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
566
567 this.subscriptions
568 .push(this.client.add_model_for_remote_entity(remote_id, cx));
569
570 for worktree in &this.worktrees {
571 if let Some(worktree) = worktree.upgrade(cx) {
572 registrations.push(worktree.update(cx, |worktree, cx| {
573 let worktree = worktree.as_local_mut().unwrap();
574 worktree.register(remote_id, cx)
575 }));
576 }
577 }
578 });
579
580 futures::future::try_join_all(registrations).await?;
581 Ok(())
582 })
583 }
584
585 pub fn remote_id(&self) -> Option<u64> {
586 match &self.client_state {
587 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
588 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
589 }
590 }
591
592 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
593 let mut id = None;
594 let mut watch = None;
595 match &self.client_state {
596 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
597 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
598 }
599
600 async move {
601 if let Some(id) = id {
602 return id;
603 }
604 let mut watch = watch.unwrap();
605 loop {
606 let id = *watch.borrow();
607 if let Some(id) = id {
608 return id;
609 }
610 watch.next().await;
611 }
612 }
613 }
614
615 pub fn replica_id(&self) -> ReplicaId {
616 match &self.client_state {
617 ProjectClientState::Local { .. } => 0,
618 ProjectClientState::Remote { replica_id, .. } => *replica_id,
619 }
620 }
621
622 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
623 &self.collaborators
624 }
625
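    /// Iterates over the worktrees that are still alive, silently skipping any
    /// weak handles whose worktree has already been dropped.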
626 pub fn worktrees<'a>(
627 &'a self,
628 cx: &'a AppContext,
629 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
630 self.worktrees
631 .iter()
632 .filter_map(move |worktree| worktree.upgrade(cx))
633 }
634
635 pub fn visible_worktrees<'a>(
636 &'a self,
637 cx: &'a AppContext,
638 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
639 self.worktrees.iter().filter_map(|worktree| {
640 worktree.upgrade(cx).and_then(|worktree| {
641 if worktree.read(cx).is_visible() {
642 Some(worktree)
643 } else {
644 None
645 }
646 })
647 })
648 }
649
650 pub fn worktree_for_id(
651 &self,
652 id: WorktreeId,
653 cx: &AppContext,
654 ) -> Option<ModelHandle<Worktree>> {
655 self.worktrees(cx)
656 .find(|worktree| worktree.read(cx).id() == id)
657 }
658
659 pub fn worktree_for_entry(
660 &self,
661 entry_id: ProjectEntryId,
662 cx: &AppContext,
663 ) -> Option<ModelHandle<Worktree>> {
664 self.worktrees(cx)
665 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
666 }
667
668 pub fn worktree_id_for_entry(
669 &self,
670 entry_id: ProjectEntryId,
671 cx: &AppContext,
672 ) -> Option<WorktreeId> {
673 self.worktree_for_entry(entry_id, cx)
674 .map(|worktree| worktree.read(cx).id())
675 }
676
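    /// Starts sharing this local project with collaborators: open buffers and
    /// worktrees are upgraded to strong handles, the server is asked to share the
    /// project, and then each worktree is shared in turn.
    ///
    /// A sketch of driving it from a caller, assuming `project` is a
    /// `ModelHandle<Project>` for a local project that already has a remote id:
    ///
    /// ```ignore
    /// project.update(cx, |project, cx| project.share(cx)).await?;
    /// ```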
677 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
678 let rpc = self.client.clone();
679 cx.spawn(|this, mut cx| async move {
680 let project_id = this.update(&mut cx, |this, cx| {
681 if let ProjectClientState::Local {
682 is_shared,
683 remote_id_rx,
684 ..
685 } = &mut this.client_state
686 {
687 *is_shared = true;
688
689 for open_buffer in this.opened_buffers.values_mut() {
690 match open_buffer {
691 OpenBuffer::Strong(_) => {}
692 OpenBuffer::Weak(buffer) => {
693 if let Some(buffer) = buffer.upgrade(cx) {
694 *open_buffer = OpenBuffer::Strong(buffer);
695 }
696 }
697 OpenBuffer::Loading(_) => unreachable!(),
698 }
699 }
700
701 for worktree_handle in this.worktrees.iter_mut() {
702 match worktree_handle {
703 WorktreeHandle::Strong(_) => {}
704 WorktreeHandle::Weak(worktree) => {
705 if let Some(worktree) = worktree.upgrade(cx) {
706 *worktree_handle = WorktreeHandle::Strong(worktree);
707 }
708 }
709 }
710 }
711
712 remote_id_rx
713 .borrow()
714 .ok_or_else(|| anyhow!("no project id"))
715 } else {
716 Err(anyhow!("can't share a remote project"))
717 }
718 })?;
719
720 rpc.request(proto::ShareProject { project_id }).await?;
721
722 let mut tasks = Vec::new();
723 this.update(&mut cx, |this, cx| {
724 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
725 worktree.update(cx, |worktree, cx| {
726 let worktree = worktree.as_local_mut().unwrap();
727 tasks.push(worktree.share(project_id, cx));
728 });
729 }
730 });
731 for task in tasks {
732 task.await?;
733 }
734 this.update(&mut cx, |_, cx| cx.notify());
735 Ok(())
736 })
737 }
738
739 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
740 let rpc = self.client.clone();
741
742 if let ProjectClientState::Local {
743 is_shared,
744 remote_id_rx,
745 ..
746 } = &mut self.client_state
747 {
748 if !*is_shared {
749 return;
750 }
751
752 *is_shared = false;
753 self.collaborators.clear();
754 self.shared_buffers.clear();
755 for worktree_handle in self.worktrees.iter_mut() {
756 if let WorktreeHandle::Strong(worktree) = worktree_handle {
757 let is_visible = worktree.update(cx, |worktree, _| {
758 worktree.as_local_mut().unwrap().unshare();
759 worktree.is_visible()
760 });
761 if !is_visible {
762 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
763 }
764 }
765 }
766
767 for open_buffer in self.opened_buffers.values_mut() {
768 match open_buffer {
769 OpenBuffer::Strong(buffer) => {
770 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
771 }
772 _ => {}
773 }
774 }
775
776 if let Some(project_id) = *remote_id_rx.borrow() {
777 rpc.send(proto::UnshareProject { project_id }).log_err();
778 }
779
780 cx.notify();
781 } else {
782 log::error!("attempted to unshare a remote project");
783 }
784 }
785
786 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
787 if let ProjectClientState::Remote {
788 sharing_has_stopped,
789 ..
790 } = &mut self.client_state
791 {
792 *sharing_has_stopped = true;
793 self.collaborators.clear();
794 cx.notify();
795 }
796 }
797
798 pub fn is_read_only(&self) -> bool {
799 match &self.client_state {
800 ProjectClientState::Local { .. } => false,
801 ProjectClientState::Remote {
802 sharing_has_stopped,
803 ..
804 } => *sharing_has_stopped,
805 }
806 }
807
808 pub fn is_local(&self) -> bool {
809 match &self.client_state {
810 ProjectClientState::Local { .. } => true,
811 ProjectClientState::Remote { .. } => false,
812 }
813 }
814
815 pub fn is_remote(&self) -> bool {
816 !self.is_local()
817 }
818
819 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
820 if self.is_remote() {
821 return Err(anyhow!("creating buffers as a guest is not supported yet"));
822 }
823
824 let buffer = cx.add_model(|cx| {
825 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
826 });
827 self.register_buffer(&buffer, cx)?;
828 Ok(buffer)
829 }
830
831 pub fn open_path(
832 &mut self,
833 path: impl Into<ProjectPath>,
834 cx: &mut ModelContext<Self>,
835 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
836 let task = self.open_buffer(path, cx);
837 cx.spawn_weak(|_, cx| async move {
838 let buffer = task.await?;
839 let project_entry_id = buffer
840 .read_with(&cx, |buffer, cx| {
841 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
842 })
843 .ok_or_else(|| anyhow!("no project entry"))?;
844 Ok((project_entry_id, buffer.into()))
845 })
846 }
847
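    /// Opens (or returns an already-open) buffer for the given project path. For a
    /// local worktree the file is loaded from disk; for a remote one it is requested
    /// from the host over RPC. Concurrent calls for the same path share a single
    /// load via `loading_buffers`.
    ///
    /// A minimal sketch, assuming `worktree_id` identifies one of this project's
    /// worktrees:
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     let path = ProjectPath {
    ///         worktree_id,
    ///         path: Path::new("src/lib.rs").into(),
    ///     };
    ///     project.open_buffer(path, cx)
    /// });
    /// let buffer = open_task.await?;
    /// ```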
848 pub fn open_buffer(
849 &mut self,
850 path: impl Into<ProjectPath>,
851 cx: &mut ModelContext<Self>,
852 ) -> Task<Result<ModelHandle<Buffer>>> {
853 let project_path = path.into();
854 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
855 worktree
856 } else {
857 return Task::ready(Err(anyhow!("no such worktree")));
858 };
859
860 // If there is already a buffer for the given path, then return it.
861 let existing_buffer = self.get_open_buffer(&project_path, cx);
862 if let Some(existing_buffer) = existing_buffer {
863 return Task::ready(Ok(existing_buffer));
864 }
865
866 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
867 // If the given path is already being loaded, then wait for that existing
868 // task to complete and return the same buffer.
869 hash_map::Entry::Occupied(e) => e.get().clone(),
870
871 // Otherwise, record the fact that this path is now being loaded.
872 hash_map::Entry::Vacant(entry) => {
873 let (mut tx, rx) = postage::watch::channel();
874 entry.insert(rx.clone());
875
876 let load_buffer = if worktree.read(cx).is_local() {
877 self.open_local_buffer(&project_path.path, &worktree, cx)
878 } else {
879 self.open_remote_buffer(&project_path.path, &worktree, cx)
880 };
881
882 cx.spawn(move |this, mut cx| async move {
883 let load_result = load_buffer.await;
884 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
885 // Record the fact that the buffer is no longer loading.
886 this.loading_buffers.remove(&project_path);
887 let buffer = load_result.map_err(Arc::new)?;
888 Ok(buffer)
889 }));
890 })
891 .detach();
892 rx
893 }
894 };
895
896 cx.foreground().spawn(async move {
897 loop {
898 if let Some(result) = loading_watch.borrow().as_ref() {
899 match result {
900 Ok(buffer) => return Ok(buffer.clone()),
901 Err(error) => return Err(anyhow!("{}", error)),
902 }
903 }
904 loading_watch.next().await;
905 }
906 })
907 }
908
909 fn open_local_buffer(
910 &mut self,
911 path: &Arc<Path>,
912 worktree: &ModelHandle<Worktree>,
913 cx: &mut ModelContext<Self>,
914 ) -> Task<Result<ModelHandle<Buffer>>> {
915 let load_buffer = worktree.update(cx, |worktree, cx| {
916 let worktree = worktree.as_local_mut().unwrap();
917 worktree.load_buffer(path, cx)
918 });
919 cx.spawn(|this, mut cx| async move {
920 let buffer = load_buffer.await?;
921 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
922 Ok(buffer)
923 })
924 }
925
926 fn open_remote_buffer(
927 &mut self,
928 path: &Arc<Path>,
929 worktree: &ModelHandle<Worktree>,
930 cx: &mut ModelContext<Self>,
931 ) -> Task<Result<ModelHandle<Buffer>>> {
932 let rpc = self.client.clone();
933 let project_id = self.remote_id().unwrap();
934 let remote_worktree_id = worktree.read(cx).id();
935 let path = path.clone();
936 let path_string = path.to_string_lossy().to_string();
937 cx.spawn(|this, mut cx| async move {
938 let response = rpc
939 .request(proto::OpenBufferByPath {
940 project_id,
941 worktree_id: remote_worktree_id.to_proto(),
942 path: path_string,
943 })
944 .await?;
945 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
946 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
947 .await
948 })
949 }
950
951 fn open_local_buffer_via_lsp(
952 &mut self,
953 abs_path: lsp::Url,
954 lsp_adapter: Arc<dyn LspAdapter>,
955 lsp_server: Arc<LanguageServer>,
956 cx: &mut ModelContext<Self>,
957 ) -> Task<Result<ModelHandle<Buffer>>> {
958 cx.spawn(|this, mut cx| async move {
959 let abs_path = abs_path
960 .to_file_path()
961 .map_err(|_| anyhow!("can't convert URI to path"))?;
962 let (worktree, relative_path) = if let Some(result) =
963 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
964 {
965 result
966 } else {
967 let worktree = this
968 .update(&mut cx, |this, cx| {
969 this.create_local_worktree(&abs_path, false, cx)
970 })
971 .await?;
972 this.update(&mut cx, |this, cx| {
973 this.language_servers.insert(
974 (worktree.read(cx).id(), lsp_adapter.name()),
975 (lsp_adapter, lsp_server),
976 );
977 });
978 (worktree, PathBuf::new())
979 };
980
981 let project_path = ProjectPath {
982 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
983 path: relative_path.into(),
984 };
985 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
986 .await
987 })
988 }
989
990 pub fn open_buffer_by_id(
991 &mut self,
992 id: u64,
993 cx: &mut ModelContext<Self>,
994 ) -> Task<Result<ModelHandle<Buffer>>> {
995 if let Some(buffer) = self.buffer_for_id(id, cx) {
996 Task::ready(Ok(buffer))
997 } else if self.is_local() {
998 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
999 } else if let Some(project_id) = self.remote_id() {
1000 let request = self
1001 .client
1002 .request(proto::OpenBufferById { project_id, id });
1003 cx.spawn(|this, mut cx| async move {
1004 let buffer = request
1005 .await?
1006 .buffer
1007 .ok_or_else(|| anyhow!("invalid buffer"))?;
1008 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1009 .await
1010 })
1011 } else {
1012 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1013 }
1014 }
1015
1016 pub fn save_buffer_as(
1017 &mut self,
1018 buffer: ModelHandle<Buffer>,
1019 abs_path: PathBuf,
1020 cx: &mut ModelContext<Project>,
1021 ) -> Task<Result<()>> {
1022 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1023 let old_path =
1024 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1025 cx.spawn(|this, mut cx| async move {
1026 if let Some(old_path) = old_path {
1027 this.update(&mut cx, |this, cx| {
1028 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1029 });
1030 }
1031 let (worktree, path) = worktree_task.await?;
1032 worktree
1033 .update(&mut cx, |worktree, cx| {
1034 worktree
1035 .as_local_mut()
1036 .unwrap()
1037 .save_buffer_as(buffer.clone(), path, cx)
1038 })
1039 .await?;
1040 this.update(&mut cx, |this, cx| {
1041 this.assign_language_to_buffer(&buffer, cx);
1042 this.register_buffer_with_language_server(&buffer, cx);
1043 });
1044 Ok(())
1045 })
1046 }
1047
1048 pub fn get_open_buffer(
1049 &mut self,
1050 path: &ProjectPath,
1051 cx: &mut ModelContext<Self>,
1052 ) -> Option<ModelHandle<Buffer>> {
1053 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1054 self.opened_buffers.values().find_map(|buffer| {
1055 let buffer = buffer.upgrade(cx)?;
1056 let file = File::from_dyn(buffer.read(cx).file())?;
1057 if file.worktree == worktree && file.path() == &path.path {
1058 Some(buffer)
1059 } else {
1060 None
1061 }
1062 })
1063 }
1064
1065 fn register_buffer(
1066 &mut self,
1067 buffer: &ModelHandle<Buffer>,
1068 cx: &mut ModelContext<Self>,
1069 ) -> Result<()> {
1070 let remote_id = buffer.read(cx).remote_id();
1071 let open_buffer = if self.is_remote() || self.is_shared() {
1072 OpenBuffer::Strong(buffer.clone())
1073 } else {
1074 OpenBuffer::Weak(buffer.downgrade())
1075 };
1076
1077 match self.opened_buffers.insert(remote_id, open_buffer) {
1078 None => {}
1079 Some(OpenBuffer::Loading(operations)) => {
1080 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1081 }
1082 Some(OpenBuffer::Weak(existing_handle)) => {
1083 if existing_handle.upgrade(cx).is_some() {
1084 Err(anyhow!(
1085 "already registered buffer with remote id {}",
1086 remote_id
1087 ))?
1088 }
1089 }
1090 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1091 "already registered buffer with remote id {}",
1092 remote_id
1093 ))?,
1094 }
1095 cx.subscribe(buffer, |this, buffer, event, cx| {
1096 this.on_buffer_event(buffer, event, cx);
1097 })
1098 .detach();
1099
1100 self.assign_language_to_buffer(buffer, cx);
1101 self.register_buffer_with_language_server(buffer, cx);
1102 cx.observe_release(buffer, |this, buffer, cx| {
1103 if let Some(file) = File::from_dyn(buffer.file()) {
1104 if file.is_local() {
1105 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1106 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1107 server
1108 .notify::<lsp::notification::DidCloseTextDocument>(
1109 lsp::DidCloseTextDocumentParams {
1110 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1111 },
1112 )
1113 .log_err();
1114 }
1115 }
1116 }
1117 })
1118 .detach();
1119
1120 Ok(())
1121 }
1122
1123 fn register_buffer_with_language_server(
1124 &mut self,
1125 buffer_handle: &ModelHandle<Buffer>,
1126 cx: &mut ModelContext<Self>,
1127 ) {
1128 let buffer = buffer_handle.read(cx);
1129 let buffer_id = buffer.remote_id();
1130 if let Some(file) = File::from_dyn(buffer.file()) {
1131 if file.is_local() {
1132 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1133 let initial_snapshot = buffer.text_snapshot();
1134 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1135
1136 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1137 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1138 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1139 .log_err();
1140 }
1141 }
1142
1143 if let Some((_, server)) = language_server {
1144 server
1145 .notify::<lsp::notification::DidOpenTextDocument>(
1146 lsp::DidOpenTextDocumentParams {
1147 text_document: lsp::TextDocumentItem::new(
1148 uri,
1149 Default::default(),
1150 0,
1151 initial_snapshot.text(),
1152 ),
                            },
                        )
1156 .log_err();
1157 buffer_handle.update(cx, |buffer, cx| {
1158 buffer.set_completion_triggers(
1159 server
1160 .capabilities()
1161 .completion_provider
1162 .as_ref()
1163 .and_then(|provider| provider.trigger_characters.clone())
1164 .unwrap_or(Vec::new()),
1165 cx,
1166 )
1167 });
1168 self.buffer_snapshots
1169 .insert(buffer_id, vec![(0, initial_snapshot)]);
1170 }
1171 }
1172 }
1173 }
1174
1175 fn unregister_buffer_from_language_server(
1176 &mut self,
1177 buffer: &ModelHandle<Buffer>,
1178 old_path: PathBuf,
1179 cx: &mut ModelContext<Self>,
1180 ) {
1181 buffer.update(cx, |buffer, cx| {
1182 buffer.update_diagnostics(Default::default(), cx);
1183 self.buffer_snapshots.remove(&buffer.remote_id());
1184 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1185 language_server
1186 .notify::<lsp::notification::DidCloseTextDocument>(
1187 lsp::DidCloseTextDocumentParams {
1188 text_document: lsp::TextDocumentIdentifier::new(
1189 lsp::Url::from_file_path(old_path).unwrap(),
1190 ),
1191 },
1192 )
1193 .log_err();
1194 }
1195 });
1196 }
1197
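    // Buffer events drive both collaboration and LSP synchronization: operations are
    // forwarded to the server as `UpdateBuffer` messages, edits are diffed against
    // the last snapshot in `buffer_snapshots` to produce incremental `didChange`
    // notifications, and saves fan out `didSave` to every language server for the
    // buffer's worktree.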
1198 fn on_buffer_event(
1199 &mut self,
1200 buffer: ModelHandle<Buffer>,
1201 event: &BufferEvent,
1202 cx: &mut ModelContext<Self>,
1203 ) -> Option<()> {
1204 match event {
1205 BufferEvent::Operation(operation) => {
1206 let project_id = self.remote_id()?;
1207 let request = self.client.request(proto::UpdateBuffer {
1208 project_id,
1209 buffer_id: buffer.read(cx).remote_id(),
1210 operations: vec![language::proto::serialize_operation(&operation)],
1211 });
1212 cx.background().spawn(request).detach_and_log_err(cx);
1213 }
1214 BufferEvent::Edited { .. } => {
1215 let (_, language_server) = self
1216 .language_server_for_buffer(buffer.read(cx), cx)?
1217 .clone();
1218 let buffer = buffer.read(cx);
1219 let file = File::from_dyn(buffer.file())?;
1220 let abs_path = file.as_local()?.abs_path(cx);
1221 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1222 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1223 let (version, prev_snapshot) = buffer_snapshots.last()?;
1224 let next_snapshot = buffer.text_snapshot();
1225 let next_version = version + 1;
1226
1227 let content_changes = buffer
1228 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1229 .map(|edit| {
1230 let edit_start = edit.new.start.0;
1231 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1232 let new_text = next_snapshot
1233 .text_for_range(edit.new.start.1..edit.new.end.1)
1234 .collect();
1235 lsp::TextDocumentContentChangeEvent {
1236 range: Some(lsp::Range::new(
1237 point_to_lsp(edit_start),
1238 point_to_lsp(edit_end),
1239 )),
1240 range_length: None,
1241 text: new_text,
1242 }
1243 })
1244 .collect();
1245
1246 buffer_snapshots.push((next_version, next_snapshot));
1247
1248 language_server
1249 .notify::<lsp::notification::DidChangeTextDocument>(
1250 lsp::DidChangeTextDocumentParams {
1251 text_document: lsp::VersionedTextDocumentIdentifier::new(
1252 uri,
1253 next_version,
1254 ),
1255 content_changes,
1256 },
1257 )
1258 .log_err();
1259 }
1260 BufferEvent::Saved => {
1261 let file = File::from_dyn(buffer.read(cx).file())?;
1262 let worktree_id = file.worktree_id(cx);
1263 let abs_path = file.as_local()?.abs_path(cx);
1264 let text_document = lsp::TextDocumentIdentifier {
1265 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1266 };
1267
1268 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1269 server
1270 .notify::<lsp::notification::DidSaveTextDocument>(
1271 lsp::DidSaveTextDocumentParams {
1272 text_document: text_document.clone(),
1273 text: None,
1274 },
1275 )
1276 .log_err();
1277 }
1278 }
1279 _ => {}
1280 }
1281
1282 None
1283 }
1284
1285 fn language_servers_for_worktree(
1286 &self,
1287 worktree_id: WorktreeId,
1288 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1289 self.language_servers.iter().filter_map(
1290 move |((language_server_worktree_id, _), server)| {
1291 if *language_server_worktree_id == worktree_id {
1292 Some(server)
1293 } else {
1294 None
1295 }
1296 },
1297 )
1298 }
1299
1300 fn assign_language_to_buffer(
1301 &mut self,
1302 buffer: &ModelHandle<Buffer>,
1303 cx: &mut ModelContext<Self>,
1304 ) -> Option<()> {
1305 // If the buffer has a language, set it and start the language server if we haven't already.
1306 let full_path = buffer.read(cx).file()?.full_path(cx);
1307 let language = self.languages.select_language(&full_path)?;
1308 buffer.update(cx, |buffer, cx| {
1309 buffer.set_language(Some(language.clone()), cx);
1310 });
1311
1312 let file = File::from_dyn(buffer.read(cx).file())?;
1313 let worktree = file.worktree.read(cx).as_local()?;
1314 let worktree_id = worktree.id();
1315 let worktree_abs_path = worktree.abs_path().clone();
1316 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1317
1318 None
1319 }
1320
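    // Language servers are keyed by (worktree, adapter name). The entry in
    // `started_language_servers` both deduplicates concurrent start requests and owns
    // the task that initializes the server, installs its notification and request
    // handlers, and then replays a `didOpen` for every already-open buffer in that
    // worktree with a matching language.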
1321 fn start_language_server(
1322 &mut self,
1323 worktree_id: WorktreeId,
1324 worktree_path: Arc<Path>,
1325 language: Arc<Language>,
1326 cx: &mut ModelContext<Self>,
1327 ) {
1328 let adapter = if let Some(adapter) = language.lsp_adapter() {
1329 adapter
1330 } else {
1331 return;
1332 };
1333 let key = (worktree_id, adapter.name());
1334 self.started_language_servers
1335 .entry(key.clone())
1336 .or_insert_with(|| {
1337 let server_id = post_inc(&mut self.next_language_server_id);
1338 let language_server = self.languages.start_language_server(
1339 server_id,
1340 language.clone(),
1341 worktree_path,
1342 self.client.http_client(),
1343 cx,
1344 );
1345 cx.spawn_weak(|this, mut cx| async move {
1346 let language_server = language_server?.await.log_err()?;
1347 let language_server = language_server
1348 .initialize(adapter.initialization_options())
1349 .await
1350 .log_err()?;
1351 let this = this.upgrade(&cx)?;
1352 let disk_based_diagnostics_progress_token =
1353 adapter.disk_based_diagnostics_progress_token();
1354
1355 language_server
1356 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1357 let this = this.downgrade();
1358 let adapter = adapter.clone();
1359 move |params, mut cx| {
1360 if let Some(this) = this.upgrade(&cx) {
1361 this.update(&mut cx, |this, cx| {
1362 this.on_lsp_diagnostics_published(
1363 server_id,
1364 params,
1365 &adapter,
1366 disk_based_diagnostics_progress_token,
1367 cx,
1368 );
1369 });
1370 }
1371 }
1372 })
1373 .detach();
1374
1375 language_server
1376 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1377 let settings = this
1378 .read_with(&cx, |this, _| this.language_server_settings.clone());
1379 move |params, _| {
1380 let settings = settings.lock().clone();
1381 async move {
1382 Ok(params
1383 .items
1384 .into_iter()
1385 .map(|item| {
1386 if let Some(section) = &item.section {
1387 settings
1388 .get(section)
1389 .cloned()
1390 .unwrap_or(serde_json::Value::Null)
1391 } else {
1392 settings.clone()
1393 }
1394 })
1395 .collect())
1396 }
1397 }
1398 })
1399 .detach();
1400
1401 language_server
1402 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1403 let this = this.downgrade();
1404 let adapter = adapter.clone();
1405 let language_server = language_server.clone();
1406 move |params, cx| {
1407 Self::on_lsp_workspace_edit(
1408 this,
1409 params,
1410 server_id,
1411 adapter.clone(),
1412 language_server.clone(),
1413 cx,
1414 )
1415 }
1416 })
1417 .detach();
1418
1419 language_server
1420 .on_notification::<lsp::notification::Progress, _>({
1421 let this = this.downgrade();
1422 move |params, mut cx| {
1423 if let Some(this) = this.upgrade(&cx) {
1424 this.update(&mut cx, |this, cx| {
1425 this.on_lsp_progress(
1426 params,
1427 server_id,
1428 disk_based_diagnostics_progress_token,
1429 cx,
1430 );
1431 });
1432 }
1433 }
1434 })
1435 .detach();
1436
1437 this.update(&mut cx, |this, cx| {
1438 this.language_servers
1439 .insert(key.clone(), (adapter, language_server.clone()));
1440 this.language_server_statuses.insert(
1441 server_id,
1442 LanguageServerStatus {
1443 name: language_server.name().to_string(),
1444 pending_work: Default::default(),
1445 pending_diagnostic_updates: 0,
1446 },
1447 );
1448 language_server
1449 .notify::<lsp::notification::DidChangeConfiguration>(
1450 lsp::DidChangeConfigurationParams {
1451 settings: this.language_server_settings.lock().clone(),
1452 },
1453 )
1454 .ok();
1455
1456 if let Some(project_id) = this.remote_id() {
1457 this.client
1458 .send(proto::StartLanguageServer {
1459 project_id,
1460 server: Some(proto::LanguageServer {
1461 id: server_id as u64,
1462 name: language_server.name().to_string(),
1463 }),
1464 })
1465 .log_err();
1466 }
1467
1468 // Tell the language server about every open buffer in the worktree that matches the language.
1469 for buffer in this.opened_buffers.values() {
1470 if let Some(buffer_handle) = buffer.upgrade(cx) {
1471 let buffer = buffer_handle.read(cx);
1472 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1473 file
1474 } else {
1475 continue;
1476 };
1477 let language = if let Some(language) = buffer.language() {
1478 language
1479 } else {
1480 continue;
1481 };
1482 if file.worktree.read(cx).id() != key.0
1483 || language.lsp_adapter().map(|a| a.name())
1484 != Some(key.1.clone())
1485 {
1486 continue;
1487 }
1488
1489 let file = file.as_local()?;
1490 let versions = this
1491 .buffer_snapshots
1492 .entry(buffer.remote_id())
1493 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1494 let (version, initial_snapshot) = versions.last().unwrap();
1495 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1496 language_server
1497 .notify::<lsp::notification::DidOpenTextDocument>(
1498 lsp::DidOpenTextDocumentParams {
1499 text_document: lsp::TextDocumentItem::new(
1500 uri,
1501 Default::default(),
1502 *version,
1503 initial_snapshot.text(),
1504 ),
1505 },
1506 )
1507 .log_err()?;
1508 buffer_handle.update(cx, |buffer, cx| {
1509 buffer.set_completion_triggers(
1510 language_server
1511 .capabilities()
1512 .completion_provider
1513 .as_ref()
1514 .and_then(|provider| {
1515 provider.trigger_characters.clone()
1516 })
1517 .unwrap_or(Vec::new()),
1518 cx,
1519 )
1520 });
1521 }
1522 }
1523
1524 cx.notify();
1525 Some(())
1526 });
1527
1528 Some(language_server)
1529 })
1530 });
1531 }
1532
1533 pub fn restart_language_servers_for_buffers(
1534 &mut self,
1535 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1536 cx: &mut ModelContext<Self>,
1537 ) -> Option<()> {
1538 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1539 .into_iter()
1540 .filter_map(|buffer| {
1541 let file = File::from_dyn(buffer.read(cx).file())?;
1542 let worktree = file.worktree.read(cx).as_local()?;
1543 let worktree_id = worktree.id();
1544 let worktree_abs_path = worktree.abs_path().clone();
1545 let full_path = file.full_path(cx);
1546 Some((worktree_id, worktree_abs_path, full_path))
1547 })
1548 .collect();
1549 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1550 let language = self.languages.select_language(&full_path)?;
1551 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1552 }
1553
1554 None
1555 }
1556
1557 fn restart_language_server(
1558 &mut self,
1559 worktree_id: WorktreeId,
1560 worktree_path: Arc<Path>,
1561 language: Arc<Language>,
1562 cx: &mut ModelContext<Self>,
1563 ) {
1564 let adapter = if let Some(adapter) = language.lsp_adapter() {
1565 adapter
1566 } else {
1567 return;
1568 };
1569 let key = (worktree_id, adapter.name());
1570 let server_to_shutdown = self.language_servers.remove(&key);
1571 self.started_language_servers.remove(&key);
1572 server_to_shutdown
1573 .as_ref()
1574 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1575 cx.spawn_weak(|this, mut cx| async move {
1576 if let Some(this) = this.upgrade(&cx) {
1577 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1578 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1579 shutdown_task.await;
1580 }
1581 }
1582
1583 this.update(&mut cx, |this, cx| {
1584 this.start_language_server(worktree_id, worktree_path, language, cx);
1585 });
1586 }
1587 })
1588 .detach();
1589 }
1590
1591 fn on_lsp_diagnostics_published(
1592 &mut self,
1593 server_id: usize,
1594 mut params: lsp::PublishDiagnosticsParams,
1595 adapter: &Arc<dyn LspAdapter>,
1596 disk_based_diagnostics_progress_token: Option<&str>,
1597 cx: &mut ModelContext<Self>,
1598 ) {
1599 adapter.process_diagnostics(&mut params);
1600 if disk_based_diagnostics_progress_token.is_none() {
1601 self.disk_based_diagnostics_started(cx);
1602 self.broadcast_language_server_update(
1603 server_id,
1604 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1605 proto::LspDiskBasedDiagnosticsUpdating {},
1606 ),
1607 );
1608 }
1609 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1610 .log_err();
1611 if disk_based_diagnostics_progress_token.is_none() {
1612 self.disk_based_diagnostics_finished(cx);
1613 self.broadcast_language_server_update(
1614 server_id,
1615 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1616 proto::LspDiskBasedDiagnosticsUpdated {},
1617 ),
1618 );
1619 }
1620 }
1621
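    // `$/progress` notifications are split into two cases: work tagged with the
    // adapter's disk-based-diagnostics token increments or decrements
    // `pending_diagnostic_updates` (emitting started/finished events at the
    // boundaries), while all other tokens are surfaced as per-server
    // `LanguageServerProgress` entries and broadcast to collaborators.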
1622 fn on_lsp_progress(
1623 &mut self,
1624 progress: lsp::ProgressParams,
1625 server_id: usize,
1626 disk_based_diagnostics_progress_token: Option<&str>,
1627 cx: &mut ModelContext<Self>,
1628 ) {
1629 let token = match progress.token {
1630 lsp::NumberOrString::String(token) => token,
1631 lsp::NumberOrString::Number(token) => {
1632 log::info!("skipping numeric progress token {}", token);
1633 return;
1634 }
1635 };
1636
1637 match progress.value {
1638 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1639 lsp::WorkDoneProgress::Begin(_) => {
1640 let language_server_status =
1641 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1642 status
1643 } else {
1644 return;
1645 };
1646
1647 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1648 language_server_status.pending_diagnostic_updates += 1;
1649 if language_server_status.pending_diagnostic_updates == 1 {
1650 self.disk_based_diagnostics_started(cx);
1651 self.broadcast_language_server_update(
1652 server_id,
1653 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1654 proto::LspDiskBasedDiagnosticsUpdating {},
1655 ),
1656 );
1657 }
1658 } else {
1659 self.on_lsp_work_start(server_id, token.clone(), cx);
1660 self.broadcast_language_server_update(
1661 server_id,
1662 proto::update_language_server::Variant::WorkStart(
1663 proto::LspWorkStart { token },
1664 ),
1665 );
1666 }
1667 }
1668 lsp::WorkDoneProgress::Report(report) => {
1669 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1670 self.on_lsp_work_progress(
1671 server_id,
1672 token.clone(),
1673 LanguageServerProgress {
1674 message: report.message.clone(),
1675 percentage: report.percentage.map(|p| p as usize),
1676 last_update_at: Instant::now(),
1677 },
1678 cx,
1679 );
1680 self.broadcast_language_server_update(
1681 server_id,
1682 proto::update_language_server::Variant::WorkProgress(
1683 proto::LspWorkProgress {
1684 token,
1685 message: report.message,
1686 percentage: report.percentage.map(|p| p as u32),
1687 },
1688 ),
1689 );
1690 }
1691 }
1692 lsp::WorkDoneProgress::End(_) => {
1693 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1694 let language_server_status = if let Some(status) =
1695 self.language_server_statuses.get_mut(&server_id)
1696 {
1697 status
1698 } else {
1699 return;
1700 };
1701
1702 language_server_status.pending_diagnostic_updates -= 1;
1703 if language_server_status.pending_diagnostic_updates == 0 {
1704 self.disk_based_diagnostics_finished(cx);
1705 self.broadcast_language_server_update(
1706 server_id,
1707 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1708 proto::LspDiskBasedDiagnosticsUpdated {},
1709 ),
1710 );
1711 }
1712 } else {
1713 self.on_lsp_work_end(server_id, token.clone(), cx);
1714 self.broadcast_language_server_update(
1715 server_id,
1716 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1717 token,
1718 }),
1719 );
1720 }
1721 }
1722 },
1723 }
1724 }
1725
1726 fn on_lsp_work_start(
1727 &mut self,
1728 language_server_id: usize,
1729 token: String,
1730 cx: &mut ModelContext<Self>,
1731 ) {
1732 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1733 status.pending_work.insert(
1734 token,
1735 LanguageServerProgress {
1736 message: None,
1737 percentage: None,
1738 last_update_at: Instant::now(),
1739 },
1740 );
1741 cx.notify();
1742 }
1743 }
1744
1745 fn on_lsp_work_progress(
1746 &mut self,
1747 language_server_id: usize,
1748 token: String,
1749 progress: LanguageServerProgress,
1750 cx: &mut ModelContext<Self>,
1751 ) {
1752 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1753 status.pending_work.insert(token, progress);
1754 cx.notify();
1755 }
1756 }
1757
1758 fn on_lsp_work_end(
1759 &mut self,
1760 language_server_id: usize,
1761 token: String,
1762 cx: &mut ModelContext<Self>,
1763 ) {
1764 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1765 status.pending_work.remove(&token);
1766 cx.notify();
1767 }
1768 }
1769
1770 async fn on_lsp_workspace_edit(
1771 this: WeakModelHandle<Self>,
1772 params: lsp::ApplyWorkspaceEditParams,
1773 server_id: usize,
1774 adapter: Arc<dyn LspAdapter>,
1775 language_server: Arc<LanguageServer>,
1776 mut cx: AsyncAppContext,
1777 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1778 let this = this
1779 .upgrade(&cx)
1780 .ok_or_else(|| anyhow!("project project closed"))?;
1781 let transaction = Self::deserialize_workspace_edit(
1782 this.clone(),
1783 params.edit,
1784 true,
1785 adapter.clone(),
1786 language_server.clone(),
1787 &mut cx,
1788 )
1789 .await
1790 .log_err();
1791 this.update(&mut cx, |this, _| {
1792 if let Some(transaction) = transaction {
1793 this.last_workspace_edits_by_language_server
1794 .insert(server_id, transaction);
1795 }
1796 });
1797 Ok(lsp::ApplyWorkspaceEditResponse {
1798 applied: true,
1799 failed_change: None,
1800 failure_reason: None,
1801 })
1802 }
1803
1804 fn broadcast_language_server_update(
1805 &self,
1806 language_server_id: usize,
1807 event: proto::update_language_server::Variant,
1808 ) {
1809 if let Some(project_id) = self.remote_id() {
1810 self.client
1811 .send(proto::UpdateLanguageServer {
1812 project_id,
1813 language_server_id: language_server_id as u64,
1814 variant: Some(event),
1815 })
1816 .log_err();
1817 }
1818 }
1819
1820 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1821 for (_, server) in self.language_servers.values() {
1822 server
1823 .notify::<lsp::notification::DidChangeConfiguration>(
1824 lsp::DidChangeConfigurationParams {
1825 settings: settings.clone(),
1826 },
1827 )
1828 .ok();
1829 }
1830 *self.language_server_settings.lock() = settings;
1831 }
1832
1833 pub fn language_server_statuses(
1834 &self,
1835 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1836 self.language_server_statuses.values()
1837 }
1838
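    /// Converts an LSP `publishDiagnostics` payload into this project's diagnostic
    /// entries. Each diagnostic that does not merely point back at an already-seen
    /// primary gets a fresh group id, and its `related_information` is folded into
    /// the same group; diagnostics that do point back at a primary are kept aside
    /// and used to refine the severity and "unnecessary" flag of the corresponding
    /// group members.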
1839 pub fn update_diagnostics(
1840 &mut self,
1841 params: lsp::PublishDiagnosticsParams,
1842 disk_based_sources: &[&str],
1843 cx: &mut ModelContext<Self>,
1844 ) -> Result<()> {
1845 let abs_path = params
1846 .uri
1847 .to_file_path()
1848 .map_err(|_| anyhow!("URI is not a file"))?;
1849 let mut next_group_id = 0;
1850 let mut diagnostics = Vec::default();
1851 let mut primary_diagnostic_group_ids = HashMap::default();
1852 let mut sources_by_group_id = HashMap::default();
1853 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1855 let source = diagnostic.source.as_ref();
1856 let code = diagnostic.code.as_ref().map(|code| match code {
1857 lsp::NumberOrString::Number(code) => code.to_string(),
1858 lsp::NumberOrString::String(code) => code.clone(),
1859 });
1860 let range = range_from_lsp(diagnostic.range);
1861 let is_supporting = diagnostic
1862 .related_information
1863 .as_ref()
1864 .map_or(false, |infos| {
1865 infos.iter().any(|info| {
1866 primary_diagnostic_group_ids.contains_key(&(
1867 source,
1868 code.clone(),
1869 range_from_lsp(info.location.range),
1870 ))
1871 })
1872 });
1873
1874 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1875 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1876 });
1877
1878 if is_supporting {
1879 supporting_diagnostics.insert(
1880 (source, code.clone(), range),
1881 (diagnostic.severity, is_unnecessary),
1882 );
1883 } else {
1884 let group_id = post_inc(&mut next_group_id);
1885 let is_disk_based = source.map_or(false, |source| {
1886 disk_based_sources.contains(&source.as_str())
1887 });
1888
1889 sources_by_group_id.insert(group_id, source);
1890 primary_diagnostic_group_ids
1891 .insert((source, code.clone(), range.clone()), group_id);
1892
1893 diagnostics.push(DiagnosticEntry {
1894 range,
1895 diagnostic: Diagnostic {
1896 code: code.clone(),
1897 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1898 message: diagnostic.message.clone(),
1899 group_id,
1900 is_primary: true,
1901 is_valid: true,
1902 is_disk_based,
1903 is_unnecessary,
1904 },
1905 });
1906 if let Some(infos) = &diagnostic.related_information {
1907 for info in infos {
1908 if info.location.uri == params.uri && !info.message.is_empty() {
1909 let range = range_from_lsp(info.location.range);
1910 diagnostics.push(DiagnosticEntry {
1911 range,
1912 diagnostic: Diagnostic {
1913 code: code.clone(),
1914 severity: DiagnosticSeverity::INFORMATION,
1915 message: info.message.clone(),
1916 group_id,
1917 is_primary: false,
1918 is_valid: true,
1919 is_disk_based,
1920 is_unnecessary: false,
1921 },
1922 });
1923 }
1924 }
1925 }
1926 }
1927 }
1928
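        // Second pass: apply the severity and `is_unnecessary` flags reported for
        // supporting diagnostics onto the matching non-primary entries.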
1929 for entry in &mut diagnostics {
1930 let diagnostic = &mut entry.diagnostic;
1931 if !diagnostic.is_primary {
1932 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1933 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1934 source,
1935 diagnostic.code.clone(),
1936 entry.range.clone(),
1937 )) {
1938 if let Some(severity) = severity {
1939 diagnostic.severity = severity;
1940 }
1941 diagnostic.is_unnecessary = is_unnecessary;
1942 }
1943 }
1944 }
1945
1946 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1947 Ok(())
1948 }
1949
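    /// Records the diagnostics for a path: any open buffer for that path is
    /// updated, the entries are stored on the local worktree, and
    /// `Event::DiagnosticsUpdated` is emitted. Diagnostics for invisible
    /// worktrees are ignored.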
1950 pub fn update_diagnostic_entries(
1951 &mut self,
1952 abs_path: PathBuf,
1953 version: Option<i32>,
1954 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1955 cx: &mut ModelContext<Project>,
1956 ) -> Result<(), anyhow::Error> {
1957 let (worktree, relative_path) = self
1958 .find_local_worktree(&abs_path, cx)
1959 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1960 if !worktree.read(cx).is_visible() {
1961 return Ok(());
1962 }
1963
1964 let project_path = ProjectPath {
1965 worktree_id: worktree.read(cx).id(),
1966 path: relative_path.into(),
1967 };
1968
1969 for buffer in self.opened_buffers.values() {
1970 if let Some(buffer) = buffer.upgrade(cx) {
1971 if buffer
1972 .read(cx)
1973 .file()
1974 .map_or(false, |file| *file.path() == project_path.path)
1975 {
1976 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1977 break;
1978 }
1979 }
1980 }
1981 worktree.update(cx, |worktree, cx| {
1982 worktree
1983 .as_local_mut()
1984 .ok_or_else(|| anyhow!("not a local worktree"))?
1985 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1986 })?;
1987 cx.emit(Event::DiagnosticsUpdated(project_path));
1988 Ok(())
1989 }
1990
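    /// Installs diagnostics on a buffer: entries are sorted, disk-based ranges are
    /// translated through any unsaved edits, and all ranges are clipped (empty
    /// ranges are widened by one character) against the snapshot corresponding to
    /// the reported LSP version.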
1991 fn update_buffer_diagnostics(
1992 &mut self,
1993 buffer: &ModelHandle<Buffer>,
1994 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1995 version: Option<i32>,
1996 cx: &mut ModelContext<Self>,
1997 ) -> Result<()> {
1998 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1999 Ordering::Equal
2000 .then_with(|| b.is_primary.cmp(&a.is_primary))
2001 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2002 .then_with(|| a.severity.cmp(&b.severity))
2003 .then_with(|| a.message.cmp(&b.message))
2004 }
2005
2006 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2007
2008 diagnostics.sort_unstable_by(|a, b| {
2009 Ordering::Equal
2010 .then_with(|| a.range.start.cmp(&b.range.start))
2011 .then_with(|| b.range.end.cmp(&a.range.end))
2012 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2013 });
2014
2015 let mut sanitized_diagnostics = Vec::new();
2016 let edits_since_save = Patch::new(
2017 snapshot
2018 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2019 .collect(),
2020 );
2021 for entry in diagnostics {
2022 let start;
2023 let end;
2024 if entry.diagnostic.is_disk_based {
2025 // Some diagnostics are based on files on disk instead of buffers'
2026 // current contents. Adjust these diagnostics' ranges to reflect
2027 // any unsaved edits.
2028 start = edits_since_save.old_to_new(entry.range.start);
2029 end = edits_since_save.old_to_new(entry.range.end);
2030 } else {
2031 start = entry.range.start;
2032 end = entry.range.end;
2033 }
2034
2035 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2036 ..snapshot.clip_point_utf16(end, Bias::Right);
2037
2038 // Expand empty ranges by one character
2039 if range.start == range.end {
2040 range.end.column += 1;
2041 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2042 if range.start == range.end && range.end.column > 0 {
2043 range.start.column -= 1;
2044 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2045 }
2046 }
2047
2048 sanitized_diagnostics.push(DiagnosticEntry {
2049 range,
2050 diagnostic: entry.diagnostic,
2051 });
2052 }
2053 drop(edits_since_save);
2054
2055 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2056 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2057 Ok(())
2058 }
2059
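    /// Reloads the given buffers from disk if they are dirty. Local buffers are
    /// reloaded directly; buffers owned by the remote host are reloaded via
    /// `proto::ReloadBuffers`. The resulting transactions are returned and
    /// optionally pushed to each buffer's undo history.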
2060 pub fn reload_buffers(
2061 &self,
2062 buffers: HashSet<ModelHandle<Buffer>>,
2063 push_to_history: bool,
2064 cx: &mut ModelContext<Self>,
2065 ) -> Task<Result<ProjectTransaction>> {
2066 let mut local_buffers = Vec::new();
2067 let mut remote_buffers = None;
2068 for buffer_handle in buffers {
2069 let buffer = buffer_handle.read(cx);
2070 if buffer.is_dirty() {
2071 if let Some(file) = File::from_dyn(buffer.file()) {
2072 if file.is_local() {
2073 local_buffers.push(buffer_handle);
2074 } else {
2075 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2076 }
2077 }
2078 }
2079 }
2080
2081 let remote_buffers = self.remote_id().zip(remote_buffers);
2082 let client = self.client.clone();
2083
2084 cx.spawn(|this, mut cx| async move {
2085 let mut project_transaction = ProjectTransaction::default();
2086
2087 if let Some((project_id, remote_buffers)) = remote_buffers {
2088 let response = client
2089 .request(proto::ReloadBuffers {
2090 project_id,
2091 buffer_ids: remote_buffers
2092 .iter()
2093 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2094 .collect(),
2095 })
2096 .await?
2097 .transaction
2098 .ok_or_else(|| anyhow!("missing transaction"))?;
2099 project_transaction = this
2100 .update(&mut cx, |this, cx| {
2101 this.deserialize_project_transaction(response, push_to_history, cx)
2102 })
2103 .await?;
2104 }
2105
2106 for buffer in local_buffers {
2107 let transaction = buffer
2108 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2109 .await?;
2110 buffer.update(&mut cx, |buffer, cx| {
2111 if let Some(transaction) = transaction {
2112 if !push_to_history {
2113 buffer.forget_transaction(transaction.id);
2114 }
2115 project_transaction.0.insert(cx.handle(), transaction);
2116 }
2117 });
2118 }
2119
2120 Ok(project_transaction)
2121 })
2122 }
2123
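    /// Formats the given buffers. Local buffers are formatted through their
    /// language server, using `textDocument/formatting` when available and
    /// falling back to whole-buffer range formatting; remote buffers are
    /// formatted by the host via `proto::FormatBuffers`.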
2124 pub fn format(
2125 &self,
2126 buffers: HashSet<ModelHandle<Buffer>>,
2127 push_to_history: bool,
2128 cx: &mut ModelContext<Project>,
2129 ) -> Task<Result<ProjectTransaction>> {
2130 let mut local_buffers = Vec::new();
2131 let mut remote_buffers = None;
2132 for buffer_handle in buffers {
2133 let buffer = buffer_handle.read(cx);
2134 if let Some(file) = File::from_dyn(buffer.file()) {
2135 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2136 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2137 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2138 }
2139 } else {
2140 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2141 }
2142 } else {
2143 return Task::ready(Ok(Default::default()));
2144 }
2145 }
2146
2147 let remote_buffers = self.remote_id().zip(remote_buffers);
2148 let client = self.client.clone();
2149
2150 cx.spawn(|this, mut cx| async move {
2151 let mut project_transaction = ProjectTransaction::default();
2152
2153 if let Some((project_id, remote_buffers)) = remote_buffers {
2154 let response = client
2155 .request(proto::FormatBuffers {
2156 project_id,
2157 buffer_ids: remote_buffers
2158 .iter()
2159 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2160 .collect(),
2161 })
2162 .await?
2163 .transaction
2164 .ok_or_else(|| anyhow!("missing transaction"))?;
2165 project_transaction = this
2166 .update(&mut cx, |this, cx| {
2167 this.deserialize_project_transaction(response, push_to_history, cx)
2168 })
2169 .await?;
2170 }
2171
2172 for (buffer, buffer_abs_path, language_server) in local_buffers {
2173 let text_document = lsp::TextDocumentIdentifier::new(
2174 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2175 );
2176 let capabilities = &language_server.capabilities();
2177 let tab_size = cx.update(|cx| {
2178 let language_name = buffer.read(cx).language().map(|language| language.name());
2179 cx.global::<Settings>().tab_size(language_name.as_deref())
2180 });
2181 let lsp_edits = if capabilities
2182 .document_formatting_provider
2183 .as_ref()
2184 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2185 {
2186 language_server
2187 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2188 text_document,
2189 options: lsp::FormattingOptions {
2190 tab_size,
2191 insert_spaces: true,
2192 insert_final_newline: Some(true),
2193 ..Default::default()
2194 },
2195 work_done_progress_params: Default::default(),
2196 })
2197 .await?
2198 } else if capabilities
2199 .document_range_formatting_provider
2200 .as_ref()
2201 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2202 {
2203 let buffer_start = lsp::Position::new(0, 0);
2204 let buffer_end =
2205 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2206 language_server
2207 .request::<lsp::request::RangeFormatting>(
2208 lsp::DocumentRangeFormattingParams {
2209 text_document,
2210 range: lsp::Range::new(buffer_start, buffer_end),
2211 options: lsp::FormattingOptions {
                                tab_size,
2213 insert_spaces: true,
2214 insert_final_newline: Some(true),
2215 ..Default::default()
2216 },
2217 work_done_progress_params: Default::default(),
2218 },
2219 )
2220 .await?
2221 } else {
2222 continue;
2223 };
2224
2225 if let Some(lsp_edits) = lsp_edits {
2226 let edits = this
2227 .update(&mut cx, |this, cx| {
2228 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2229 })
2230 .await?;
2231 buffer.update(&mut cx, |buffer, cx| {
2232 buffer.finalize_last_transaction();
2233 buffer.start_transaction();
2234 for (range, text) in edits {
2235 buffer.edit([range], text, cx);
2236 }
2237 if buffer.end_transaction(cx).is_some() {
2238 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2239 if !push_to_history {
2240 buffer.forget_transaction(transaction.id);
2241 }
2242 project_transaction.0.insert(cx.handle(), transaction);
2243 }
2244 });
2245 }
2246 }
2247
2248 Ok(project_transaction)
2249 })
2250 }
2251
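    /// Returns the definitions of the symbol at `position`, resolved either
    /// through the buffer's language server or by the remote host.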
2252 pub fn definition<T: ToPointUtf16>(
2253 &self,
2254 buffer: &ModelHandle<Buffer>,
2255 position: T,
2256 cx: &mut ModelContext<Self>,
2257 ) -> Task<Result<Vec<Location>>> {
2258 let position = position.to_point_utf16(buffer.read(cx));
2259 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2260 }
2261
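    /// Returns all references to the symbol at `position`.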
2262 pub fn references<T: ToPointUtf16>(
2263 &self,
2264 buffer: &ModelHandle<Buffer>,
2265 position: T,
2266 cx: &mut ModelContext<Self>,
2267 ) -> Task<Result<Vec<Location>>> {
2268 let position = position.to_point_utf16(buffer.read(cx));
2269 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2270 }
2271
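    /// Returns the document highlights for the symbol at `position`.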
2272 pub fn document_highlights<T: ToPointUtf16>(
2273 &self,
2274 buffer: &ModelHandle<Buffer>,
2275 position: T,
2276 cx: &mut ModelContext<Self>,
2277 ) -> Task<Result<Vec<DocumentHighlight>>> {
2278 let position = position.to_point_utf16(buffer.read(cx));
2279
2280 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2281 }
2282
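    /// Performs a workspace-wide symbol search. Locally, `workspace/symbol` is
    /// queried on every running language server and each result is resolved to a
    /// project path; on remote projects the query is forwarded to the host.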
2283 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2284 if self.is_local() {
2285 let mut language_servers = HashMap::default();
2286 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2287 if let Some(worktree) = self
2288 .worktree_for_id(*worktree_id, cx)
2289 .and_then(|worktree| worktree.read(cx).as_local())
2290 {
2291 language_servers
2292 .entry(Arc::as_ptr(language_server))
2293 .or_insert((
2294 lsp_adapter.clone(),
2295 language_server.clone(),
2296 *worktree_id,
2297 worktree.abs_path().clone(),
2298 ));
2299 }
2300 }
2301
2302 let mut requests = Vec::new();
2303 for (_, language_server, _, _) in language_servers.values() {
2304 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2305 lsp::WorkspaceSymbolParams {
2306 query: query.to_string(),
2307 ..Default::default()
2308 },
2309 ));
2310 }
2311
2312 cx.spawn_weak(|this, cx| async move {
2313 let responses = futures::future::try_join_all(requests).await?;
2314
2315 let mut symbols = Vec::new();
2316 if let Some(this) = this.upgrade(&cx) {
2317 this.read_with(&cx, |this, cx| {
2318 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2319 language_servers.into_values().zip(responses)
2320 {
2321 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2322 |lsp_symbol| {
2323 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2324 let mut worktree_id = source_worktree_id;
2325 let path;
2326 if let Some((worktree, rel_path)) =
2327 this.find_local_worktree(&abs_path, cx)
2328 {
2329 worktree_id = worktree.read(cx).id();
2330 path = rel_path;
2331 } else {
2332 path = relativize_path(&worktree_abs_path, &abs_path);
2333 }
2334
2335 let label = this
2336 .languages
2337 .select_language(&path)
2338 .and_then(|language| {
2339 language
2340 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2341 })
2342 .unwrap_or_else(|| {
2343 CodeLabel::plain(lsp_symbol.name.clone(), None)
2344 });
2345 let signature = this.symbol_signature(worktree_id, &path);
2346
2347 Some(Symbol {
2348 source_worktree_id,
2349 worktree_id,
2350 language_server_name: adapter.name(),
2351 name: lsp_symbol.name,
2352 kind: lsp_symbol.kind,
2353 label,
2354 path,
2355 range: range_from_lsp(lsp_symbol.location.range),
2356 signature,
2357 })
2358 },
2359 ));
2360 }
2361 })
2362 }
2363
2364 Ok(symbols)
2365 })
2366 } else if let Some(project_id) = self.remote_id() {
2367 let request = self.client.request(proto::GetProjectSymbols {
2368 project_id,
2369 query: query.to_string(),
2370 });
2371 cx.spawn_weak(|this, cx| async move {
2372 let response = request.await?;
2373 let mut symbols = Vec::new();
2374 if let Some(this) = this.upgrade(&cx) {
2375 this.read_with(&cx, |this, _| {
2376 symbols.extend(
2377 response
2378 .symbols
2379 .into_iter()
2380 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2381 );
2382 })
2383 }
2384 Ok(symbols)
2385 })
2386 } else {
2387 Task::ready(Ok(Default::default()))
2388 }
2389 }
2390
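    /// Opens the buffer containing the given symbol, either through the language
    /// server that reported it or via the remote host.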
2391 pub fn open_buffer_for_symbol(
2392 &mut self,
2393 symbol: &Symbol,
2394 cx: &mut ModelContext<Self>,
2395 ) -> Task<Result<ModelHandle<Buffer>>> {
2396 if self.is_local() {
2397 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2398 symbol.source_worktree_id,
2399 symbol.language_server_name.clone(),
2400 )) {
2401 server.clone()
2402 } else {
2403 return Task::ready(Err(anyhow!(
2404 "language server for worktree and language not found"
2405 )));
2406 };
2407
2408 let worktree_abs_path = if let Some(worktree_abs_path) = self
2409 .worktree_for_id(symbol.worktree_id, cx)
2410 .and_then(|worktree| worktree.read(cx).as_local())
2411 .map(|local_worktree| local_worktree.abs_path())
2412 {
2413 worktree_abs_path
2414 } else {
2415 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2416 };
2417 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2418 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2419 uri
2420 } else {
2421 return Task::ready(Err(anyhow!("invalid symbol path")));
2422 };
2423
2424 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2425 } else if let Some(project_id) = self.remote_id() {
2426 let request = self.client.request(proto::OpenBufferForSymbol {
2427 project_id,
2428 symbol: Some(serialize_symbol(symbol)),
2429 });
2430 cx.spawn(|this, mut cx| async move {
2431 let response = request.await?;
2432 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2433 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2434 .await
2435 })
2436 } else {
2437 Task::ready(Err(anyhow!("project does not have a remote id")))
2438 }
2439 }
2440
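    /// Requests completions at `position`. Locally, `textDocument/completion` is
    /// sent to the buffer's language server and the items are converted into
    /// anchored `Completion`s; remotely, the host is queried and the buffer waits
    /// to catch up to the version the completions were computed against.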
2441 pub fn completions<T: ToPointUtf16>(
2442 &self,
2443 source_buffer_handle: &ModelHandle<Buffer>,
2444 position: T,
2445 cx: &mut ModelContext<Self>,
2446 ) -> Task<Result<Vec<Completion>>> {
2447 let source_buffer_handle = source_buffer_handle.clone();
2448 let source_buffer = source_buffer_handle.read(cx);
2449 let buffer_id = source_buffer.remote_id();
2450 let language = source_buffer.language().cloned();
2451 let worktree;
2452 let buffer_abs_path;
2453 if let Some(file) = File::from_dyn(source_buffer.file()) {
2454 worktree = file.worktree.clone();
2455 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2456 } else {
2457 return Task::ready(Ok(Default::default()));
2458 };
2459
2460 let position = position.to_point_utf16(source_buffer);
2461 let anchor = source_buffer.anchor_after(position);
2462
2463 if worktree.read(cx).as_local().is_some() {
2464 let buffer_abs_path = buffer_abs_path.unwrap();
2465 let (_, lang_server) =
2466 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2467 server.clone()
2468 } else {
2469 return Task::ready(Ok(Default::default()));
2470 };
2471
2472 cx.spawn(|_, cx| async move {
2473 let completions = lang_server
2474 .request::<lsp::request::Completion>(lsp::CompletionParams {
2475 text_document_position: lsp::TextDocumentPositionParams::new(
2476 lsp::TextDocumentIdentifier::new(
2477 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2478 ),
2479 point_to_lsp(position),
2480 ),
2481 context: Default::default(),
2482 work_done_progress_params: Default::default(),
2483 partial_result_params: Default::default(),
2484 })
2485 .await
2486 .context("lsp completion request failed")?;
2487
2488 let completions = if let Some(completions) = completions {
2489 match completions {
2490 lsp::CompletionResponse::Array(completions) => completions,
2491 lsp::CompletionResponse::List(list) => list.items,
2492 }
2493 } else {
2494 Default::default()
2495 };
2496
2497 source_buffer_handle.read_with(&cx, |this, _| {
2498 Ok(completions
2499 .into_iter()
2500 .filter_map(|lsp_completion| {
2501 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2502 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2503 (range_from_lsp(edit.range), edit.new_text.clone())
2504 }
2505 None => {
2506 let clipped_position =
2507 this.clip_point_utf16(position, Bias::Left);
2508 if position != clipped_position {
2509 log::info!("completion out of expected range");
2510 return None;
2511 }
2512 (
2513 this.common_prefix_at(
2514 clipped_position,
2515 &lsp_completion.label,
2516 ),
2517 lsp_completion.label.clone(),
2518 )
2519 }
2520 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2521 log::info!("unsupported insert/replace completion");
2522 return None;
2523 }
2524 };
2525
2526 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2527 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2528 if clipped_start == old_range.start && clipped_end == old_range.end {
2529 Some(Completion {
2530 old_range: this.anchor_before(old_range.start)
2531 ..this.anchor_after(old_range.end),
2532 new_text,
2533 label: language
2534 .as_ref()
2535 .and_then(|l| l.label_for_completion(&lsp_completion))
2536 .unwrap_or_else(|| {
2537 CodeLabel::plain(
2538 lsp_completion.label.clone(),
2539 lsp_completion.filter_text.as_deref(),
2540 )
2541 }),
2542 lsp_completion,
2543 })
2544 } else {
2545 log::info!("completion out of expected range");
2546 None
2547 }
2548 })
2549 .collect())
2550 })
2551 })
2552 } else if let Some(project_id) = self.remote_id() {
2553 let rpc = self.client.clone();
2554 let message = proto::GetCompletions {
2555 project_id,
2556 buffer_id,
2557 position: Some(language::proto::serialize_anchor(&anchor)),
2558 version: serialize_version(&source_buffer.version()),
2559 };
2560 cx.spawn_weak(|_, mut cx| async move {
2561 let response = rpc.request(message).await?;
2562
2563 source_buffer_handle
2564 .update(&mut cx, |buffer, _| {
2565 buffer.wait_for_version(deserialize_version(response.version))
2566 })
2567 .await;
2568
2569 response
2570 .completions
2571 .into_iter()
2572 .map(|completion| {
2573 language::proto::deserialize_completion(completion, language.as_ref())
2574 })
2575 .collect()
2576 })
2577 } else {
2578 Task::ready(Ok(Default::default()))
2579 }
2580 }
2581
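    /// Resolves a completion item and applies any `additional_text_edits` it
    /// carries (such as auto-imports) to the buffer, returning the resulting
    /// transaction.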
2582 pub fn apply_additional_edits_for_completion(
2583 &self,
2584 buffer_handle: ModelHandle<Buffer>,
2585 completion: Completion,
2586 push_to_history: bool,
2587 cx: &mut ModelContext<Self>,
2588 ) -> Task<Result<Option<Transaction>>> {
2589 let buffer = buffer_handle.read(cx);
2590 let buffer_id = buffer.remote_id();
2591
2592 if self.is_local() {
2593 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2594 {
2595 server.clone()
2596 } else {
2597 return Task::ready(Ok(Default::default()));
2598 };
2599
2600 cx.spawn(|this, mut cx| async move {
2601 let resolved_completion = lang_server
2602 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2603 .await?;
2604 if let Some(edits) = resolved_completion.additional_text_edits {
2605 let edits = this
2606 .update(&mut cx, |this, cx| {
2607 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2608 })
2609 .await?;
2610 buffer_handle.update(&mut cx, |buffer, cx| {
2611 buffer.finalize_last_transaction();
2612 buffer.start_transaction();
2613 for (range, text) in edits {
2614 buffer.edit([range], text, cx);
2615 }
2616 let transaction = if buffer.end_transaction(cx).is_some() {
2617 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2618 if !push_to_history {
2619 buffer.forget_transaction(transaction.id);
2620 }
2621 Some(transaction)
2622 } else {
2623 None
2624 };
2625 Ok(transaction)
2626 })
2627 } else {
2628 Ok(None)
2629 }
2630 })
2631 } else if let Some(project_id) = self.remote_id() {
2632 let client = self.client.clone();
2633 cx.spawn(|_, mut cx| async move {
2634 let response = client
2635 .request(proto::ApplyCompletionAdditionalEdits {
2636 project_id,
2637 buffer_id,
2638 completion: Some(language::proto::serialize_completion(&completion)),
2639 })
2640 .await?;
2641
2642 if let Some(transaction) = response.transaction {
2643 let transaction = language::proto::deserialize_transaction(transaction)?;
2644 buffer_handle
2645 .update(&mut cx, |buffer, _| {
2646 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2647 })
2648 .await;
2649 if push_to_history {
2650 buffer_handle.update(&mut cx, |buffer, _| {
2651 buffer.push_transaction(transaction.clone(), Instant::now());
2652 });
2653 }
2654 Ok(Some(transaction))
2655 } else {
2656 Ok(None)
2657 }
2658 })
2659 } else {
2660 Task::ready(Err(anyhow!("project does not have a remote id")))
2661 }
2662 }
2663
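    /// Requests the code actions available for the given range, sending along the
    /// diagnostics that overlap it and restricting results to quickfix, refactor,
    /// and source actions.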
2664 pub fn code_actions<T: Clone + ToOffset>(
2665 &self,
2666 buffer_handle: &ModelHandle<Buffer>,
2667 range: Range<T>,
2668 cx: &mut ModelContext<Self>,
2669 ) -> Task<Result<Vec<CodeAction>>> {
2670 let buffer_handle = buffer_handle.clone();
2671 let buffer = buffer_handle.read(cx);
2672 let snapshot = buffer.snapshot();
2673 let relevant_diagnostics = snapshot
2674 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2675 .map(|entry| entry.to_lsp_diagnostic_stub())
2676 .collect();
2677 let buffer_id = buffer.remote_id();
2678 let worktree;
2679 let buffer_abs_path;
2680 if let Some(file) = File::from_dyn(buffer.file()) {
2681 worktree = file.worktree.clone();
2682 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2683 } else {
2684 return Task::ready(Ok(Default::default()));
2685 };
2686 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2687
2688 if worktree.read(cx).as_local().is_some() {
2689 let buffer_abs_path = buffer_abs_path.unwrap();
2690 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2691 {
2692 server.clone()
2693 } else {
2694 return Task::ready(Ok(Default::default()));
2695 };
2696
2697 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2698 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2700 return Ok(Default::default());
2701 }
2702
2703 Ok(lang_server
2704 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2705 text_document: lsp::TextDocumentIdentifier::new(
2706 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2707 ),
2708 range: lsp_range,
2709 work_done_progress_params: Default::default(),
2710 partial_result_params: Default::default(),
2711 context: lsp::CodeActionContext {
2712 diagnostics: relevant_diagnostics,
2713 only: Some(vec![
2714 lsp::CodeActionKind::QUICKFIX,
2715 lsp::CodeActionKind::REFACTOR,
2716 lsp::CodeActionKind::REFACTOR_EXTRACT,
2717 lsp::CodeActionKind::SOURCE,
2718 ]),
2719 },
2720 })
2721 .await?
2722 .unwrap_or_default()
2723 .into_iter()
2724 .filter_map(|entry| {
2725 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2726 Some(CodeAction {
2727 range: range.clone(),
2728 lsp_action,
2729 })
2730 } else {
2731 None
2732 }
2733 })
2734 .collect())
2735 })
2736 } else if let Some(project_id) = self.remote_id() {
2737 let rpc = self.client.clone();
2738 let version = buffer.version();
2739 cx.spawn_weak(|_, mut cx| async move {
2740 let response = rpc
2741 .request(proto::GetCodeActions {
2742 project_id,
2743 buffer_id,
2744 start: Some(language::proto::serialize_anchor(&range.start)),
2745 end: Some(language::proto::serialize_anchor(&range.end)),
2746 version: serialize_version(&version),
2747 })
2748 .await?;
2749
2750 buffer_handle
2751 .update(&mut cx, |buffer, _| {
2752 buffer.wait_for_version(deserialize_version(response.version))
2753 })
2754 .await;
2755
2756 response
2757 .actions
2758 .into_iter()
2759 .map(language::proto::deserialize_code_action)
2760 .collect()
2761 })
2762 } else {
2763 Task::ready(Ok(Default::default()))
2764 }
2765 }
2766
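    /// Applies a code action: the action is re-resolved (or re-requested and
    /// matched by title if it carries no resolve data), and then either its
    /// workspace edit is applied or its command is executed, returning the
    /// buffers and transactions that were produced.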
2767 pub fn apply_code_action(
2768 &self,
2769 buffer_handle: ModelHandle<Buffer>,
2770 mut action: CodeAction,
2771 push_to_history: bool,
2772 cx: &mut ModelContext<Self>,
2773 ) -> Task<Result<ProjectTransaction>> {
2774 if self.is_local() {
2775 let buffer = buffer_handle.read(cx);
2776 let (lsp_adapter, lang_server) =
2777 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2778 server.clone()
2779 } else {
2780 return Task::ready(Ok(Default::default()));
2781 };
2782 let range = action.range.to_point_utf16(buffer);
2783
2784 cx.spawn(|this, mut cx| async move {
2785 if let Some(lsp_range) = action
2786 .lsp_action
2787 .data
2788 .as_mut()
2789 .and_then(|d| d.get_mut("codeActionParams"))
2790 .and_then(|d| d.get_mut("range"))
2791 {
2792 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2793 action.lsp_action = lang_server
2794 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2795 .await?;
2796 } else {
2797 let actions = this
2798 .update(&mut cx, |this, cx| {
2799 this.code_actions(&buffer_handle, action.range, cx)
2800 })
2801 .await?;
2802 action.lsp_action = actions
2803 .into_iter()
2804 .find(|a| a.lsp_action.title == action.lsp_action.title)
2805 .ok_or_else(|| anyhow!("code action is outdated"))?
2806 .lsp_action;
2807 }
2808
2809 if let Some(edit) = action.lsp_action.edit {
2810 Self::deserialize_workspace_edit(
2811 this,
2812 edit,
2813 push_to_history,
2814 lsp_adapter,
2815 lang_server,
2816 &mut cx,
2817 )
2818 .await
2819 } else if let Some(command) = action.lsp_action.command {
2820 this.update(&mut cx, |this, _| {
2821 this.last_workspace_edits_by_language_server
2822 .remove(&lang_server.server_id());
2823 });
2824 lang_server
2825 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2826 command: command.command,
2827 arguments: command.arguments.unwrap_or_default(),
2828 ..Default::default()
2829 })
2830 .await?;
2831 Ok(this.update(&mut cx, |this, _| {
2832 this.last_workspace_edits_by_language_server
2833 .remove(&lang_server.server_id())
2834 .unwrap_or_default()
2835 }))
2836 } else {
2837 Ok(ProjectTransaction::default())
2838 }
2839 })
2840 } else if let Some(project_id) = self.remote_id() {
2841 let client = self.client.clone();
2842 let request = proto::ApplyCodeAction {
2843 project_id,
2844 buffer_id: buffer_handle.read(cx).remote_id(),
2845 action: Some(language::proto::serialize_code_action(&action)),
2846 };
2847 cx.spawn(|this, mut cx| async move {
2848 let response = client
2849 .request(request)
2850 .await?
2851 .transaction
2852 .ok_or_else(|| anyhow!("missing transaction"))?;
2853 this.update(&mut cx, |this, cx| {
2854 this.deserialize_project_transaction(response, push_to_history, cx)
2855 })
2856 .await
2857 })
2858 } else {
2859 Task::ready(Err(anyhow!("project does not have a remote id")))
2860 }
2861 }
2862
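    /// Applies an `lsp::WorkspaceEdit` to the project: resource operations
    /// (create, rename, delete) go through the filesystem, text edits are applied
    /// to the affected buffers, and everything is collected into a single
    /// `ProjectTransaction`.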
2863 async fn deserialize_workspace_edit(
2864 this: ModelHandle<Self>,
2865 edit: lsp::WorkspaceEdit,
2866 push_to_history: bool,
2867 lsp_adapter: Arc<dyn LspAdapter>,
2868 language_server: Arc<LanguageServer>,
2869 cx: &mut AsyncAppContext,
2870 ) -> Result<ProjectTransaction> {
2871 let fs = this.read_with(cx, |this, _| this.fs.clone());
2872 let mut operations = Vec::new();
2873 if let Some(document_changes) = edit.document_changes {
2874 match document_changes {
2875 lsp::DocumentChanges::Edits(edits) => {
2876 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2877 }
2878 lsp::DocumentChanges::Operations(ops) => operations = ops,
2879 }
2880 } else if let Some(changes) = edit.changes {
2881 operations.extend(changes.into_iter().map(|(uri, edits)| {
2882 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2883 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2884 uri,
2885 version: None,
2886 },
2887 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2888 })
2889 }));
2890 }
2891
2892 let mut project_transaction = ProjectTransaction::default();
2893 for operation in operations {
2894 match operation {
2895 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2896 let abs_path = op
2897 .uri
2898 .to_file_path()
2899 .map_err(|_| anyhow!("can't convert URI to path"))?;
2900
2901 if let Some(parent_path) = abs_path.parent() {
2902 fs.create_dir(parent_path).await?;
2903 }
2904 if abs_path.ends_with("/") {
2905 fs.create_dir(&abs_path).await?;
2906 } else {
2907 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2908 .await?;
2909 }
2910 }
2911 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2912 let source_abs_path = op
2913 .old_uri
2914 .to_file_path()
2915 .map_err(|_| anyhow!("can't convert URI to path"))?;
2916 let target_abs_path = op
2917 .new_uri
2918 .to_file_path()
2919 .map_err(|_| anyhow!("can't convert URI to path"))?;
2920 fs.rename(
2921 &source_abs_path,
2922 &target_abs_path,
2923 op.options.map(Into::into).unwrap_or_default(),
2924 )
2925 .await?;
2926 }
2927 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2928 let abs_path = op
2929 .uri
2930 .to_file_path()
2931 .map_err(|_| anyhow!("can't convert URI to path"))?;
2932 let options = op.options.map(Into::into).unwrap_or_default();
2933 if abs_path.ends_with("/") {
2934 fs.remove_dir(&abs_path, options).await?;
2935 } else {
2936 fs.remove_file(&abs_path, options).await?;
2937 }
2938 }
2939 lsp::DocumentChangeOperation::Edit(op) => {
2940 let buffer_to_edit = this
2941 .update(cx, |this, cx| {
2942 this.open_local_buffer_via_lsp(
2943 op.text_document.uri,
2944 lsp_adapter.clone(),
2945 language_server.clone(),
2946 cx,
2947 )
2948 })
2949 .await?;
2950
2951 let edits = this
2952 .update(cx, |this, cx| {
2953 let edits = op.edits.into_iter().map(|edit| match edit {
2954 lsp::OneOf::Left(edit) => edit,
2955 lsp::OneOf::Right(edit) => edit.text_edit,
2956 });
2957 this.edits_from_lsp(
2958 &buffer_to_edit,
2959 edits,
2960 op.text_document.version,
2961 cx,
2962 )
2963 })
2964 .await?;
2965
2966 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2967 buffer.finalize_last_transaction();
2968 buffer.start_transaction();
2969 for (range, text) in edits {
2970 buffer.edit([range], text, cx);
2971 }
2972 let transaction = if buffer.end_transaction(cx).is_some() {
2973 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2974 if !push_to_history {
2975 buffer.forget_transaction(transaction.id);
2976 }
2977 Some(transaction)
2978 } else {
2979 None
2980 };
2981
2982 transaction
2983 });
2984 if let Some(transaction) = transaction {
2985 project_transaction.0.insert(buffer_to_edit, transaction);
2986 }
2987 }
2988 }
2989 }
2990
2991 Ok(project_transaction)
2992 }
2993
2994 pub fn prepare_rename<T: ToPointUtf16>(
2995 &self,
2996 buffer: ModelHandle<Buffer>,
2997 position: T,
2998 cx: &mut ModelContext<Self>,
2999 ) -> Task<Result<Option<Range<Anchor>>>> {
3000 let position = position.to_point_utf16(buffer.read(cx));
3001 self.request_lsp(buffer, PrepareRename { position }, cx)
3002 }
3003
3004 pub fn perform_rename<T: ToPointUtf16>(
3005 &self,
3006 buffer: ModelHandle<Buffer>,
3007 position: T,
3008 new_name: String,
3009 push_to_history: bool,
3010 cx: &mut ModelContext<Self>,
3011 ) -> Task<Result<ProjectTransaction>> {
3012 let position = position.to_point_utf16(buffer.read(cx));
3013 self.request_lsp(
3014 buffer,
3015 PerformRename {
3016 position,
3017 new_name,
3018 push_to_history,
3019 },
3020 cx,
3021 )
3022 }
3023
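    /// Searches the project for the given query. Locally, candidate files are
    /// scanned in parallel on background threads, matching files are opened as
    /// buffers, and each buffer is searched for exact ranges; remote projects
    /// forward the query to the host.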
3024 pub fn search(
3025 &self,
3026 query: SearchQuery,
3027 cx: &mut ModelContext<Self>,
3028 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3029 if self.is_local() {
3030 let snapshots = self
3031 .visible_worktrees(cx)
3032 .filter_map(|tree| {
3033 let tree = tree.read(cx).as_local()?;
3034 Some(tree.snapshot())
3035 })
3036 .collect::<Vec<_>>();
3037
3038 let background = cx.background().clone();
3039 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3040 if path_count == 0 {
3041 return Task::ready(Ok(Default::default()));
3042 }
3043 let workers = background.num_cpus().min(path_count);
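            // Fan the candidate paths out across the background workers. Each
            // worker scans its slice of the visible files and reports the paths
            // whose contents match the query.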
3044 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3045 cx.background()
3046 .spawn({
3047 let fs = self.fs.clone();
3048 let background = cx.background().clone();
3049 let query = query.clone();
3050 async move {
3051 let fs = &fs;
3052 let query = &query;
3053 let matching_paths_tx = &matching_paths_tx;
3054 let paths_per_worker = (path_count + workers - 1) / workers;
3055 let snapshots = &snapshots;
3056 background
3057 .scoped(|scope| {
3058 for worker_ix in 0..workers {
3059 let worker_start_ix = worker_ix * paths_per_worker;
3060 let worker_end_ix = worker_start_ix + paths_per_worker;
3061 scope.spawn(async move {
3062 let mut snapshot_start_ix = 0;
3063 let mut abs_path = PathBuf::new();
3064 for snapshot in snapshots {
3065 let snapshot_end_ix =
3066 snapshot_start_ix + snapshot.visible_file_count();
3067 if worker_end_ix <= snapshot_start_ix {
3068 break;
3069 } else if worker_start_ix > snapshot_end_ix {
3070 snapshot_start_ix = snapshot_end_ix;
3071 continue;
3072 } else {
3073 let start_in_snapshot = worker_start_ix
3074 .saturating_sub(snapshot_start_ix);
3075 let end_in_snapshot =
3076 cmp::min(worker_end_ix, snapshot_end_ix)
3077 - snapshot_start_ix;
3078
3079 for entry in snapshot
3080 .files(false, start_in_snapshot)
3081 .take(end_in_snapshot - start_in_snapshot)
3082 {
3083 if matching_paths_tx.is_closed() {
3084 break;
3085 }
3086
3087 abs_path.clear();
3088 abs_path.push(&snapshot.abs_path());
3089 abs_path.push(&entry.path);
3090 let matches = if let Some(file) =
3091 fs.open_sync(&abs_path).await.log_err()
3092 {
3093 query.detect(file).unwrap_or(false)
3094 } else {
3095 false
3096 };
3097
3098 if matches {
3099 let project_path =
3100 (snapshot.id(), entry.path.clone());
3101 if matching_paths_tx
3102 .send(project_path)
3103 .await
3104 .is_err()
3105 {
3106 break;
3107 }
3108 }
3109 }
3110
3111 snapshot_start_ix = snapshot_end_ix;
3112 }
3113 }
3114 });
3115 }
3116 })
3117 .await;
3118 }
3119 })
3120 .detach();
3121
3122 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3123 let open_buffers = self
3124 .opened_buffers
3125 .values()
3126 .filter_map(|b| b.upgrade(cx))
3127 .collect::<HashSet<_>>();
3128 cx.spawn(|this, cx| async move {
3129 for buffer in &open_buffers {
3130 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3131 buffers_tx.send((buffer.clone(), snapshot)).await?;
3132 }
3133
3134 let open_buffers = Rc::new(RefCell::new(open_buffers));
3135 while let Some(project_path) = matching_paths_rx.next().await {
3136 if buffers_tx.is_closed() {
3137 break;
3138 }
3139
3140 let this = this.clone();
3141 let open_buffers = open_buffers.clone();
3142 let buffers_tx = buffers_tx.clone();
3143 cx.spawn(|mut cx| async move {
3144 if let Some(buffer) = this
3145 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3146 .await
3147 .log_err()
3148 {
3149 if open_buffers.borrow_mut().insert(buffer.clone()) {
3150 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3151 buffers_tx.send((buffer, snapshot)).await?;
3152 }
3153 }
3154
3155 Ok::<_, anyhow::Error>(())
3156 })
3157 .detach();
3158 }
3159
3160 Ok::<_, anyhow::Error>(())
3161 })
3162 .detach_and_log_err(cx);
3163
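            // Search the contents of every opened buffer on the background
            // threads, collecting anchor ranges for each match.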
3164 let background = cx.background().clone();
3165 cx.background().spawn(async move {
3166 let query = &query;
3167 let mut matched_buffers = Vec::new();
3168 for _ in 0..workers {
3169 matched_buffers.push(HashMap::default());
3170 }
3171 background
3172 .scoped(|scope| {
3173 for worker_matched_buffers in matched_buffers.iter_mut() {
3174 let mut buffers_rx = buffers_rx.clone();
3175 scope.spawn(async move {
3176 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3177 let buffer_matches = query
3178 .search(snapshot.as_rope())
3179 .await
3180 .iter()
3181 .map(|range| {
3182 snapshot.anchor_before(range.start)
3183 ..snapshot.anchor_after(range.end)
3184 })
3185 .collect::<Vec<_>>();
3186 if !buffer_matches.is_empty() {
3187 worker_matched_buffers
3188 .insert(buffer.clone(), buffer_matches);
3189 }
3190 }
3191 });
3192 }
3193 })
3194 .await;
3195 Ok(matched_buffers.into_iter().flatten().collect())
3196 })
3197 } else if let Some(project_id) = self.remote_id() {
3198 let request = self.client.request(query.to_proto(project_id));
3199 cx.spawn(|this, mut cx| async move {
3200 let response = request.await?;
3201 let mut result = HashMap::default();
3202 for location in response.locations {
3203 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3204 let target_buffer = this
3205 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3206 .await?;
3207 let start = location
3208 .start
3209 .and_then(deserialize_anchor)
3210 .ok_or_else(|| anyhow!("missing target start"))?;
3211 let end = location
3212 .end
3213 .and_then(deserialize_anchor)
3214 .ok_or_else(|| anyhow!("missing target end"))?;
3215 result
3216 .entry(target_buffer)
                    .or_default()
3218 .push(start..end)
3219 }
3220 Ok(result)
3221 })
3222 } else {
3223 Task::ready(Ok(Default::default()))
3224 }
3225 }
3226
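    /// Issues a typed LSP request for the given buffer, either directly to the
    /// buffer's language server when the project is local or over RPC to the
    /// host otherwise. Returns a default response if no server is available or
    /// the server lacks the required capability.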
3227 fn request_lsp<R: LspCommand>(
3228 &self,
3229 buffer_handle: ModelHandle<Buffer>,
3230 request: R,
3231 cx: &mut ModelContext<Self>,
3232 ) -> Task<Result<R::Response>>
3233 where
3234 <R::LspRequest as lsp::request::Request>::Result: Send,
3235 {
3236 let buffer = buffer_handle.read(cx);
3237 if self.is_local() {
3238 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3239 if let Some((file, (_, language_server))) =
3240 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3241 {
3242 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3243 return cx.spawn(|this, cx| async move {
3244 if !request.check_capabilities(&language_server.capabilities()) {
3245 return Ok(Default::default());
3246 }
3247
3248 let response = language_server
3249 .request::<R::LspRequest>(lsp_params)
3250 .await
3251 .context("lsp request failed")?;
3252 request
3253 .response_from_lsp(response, this, buffer_handle, cx)
3254 .await
3255 });
3256 }
3257 } else if let Some(project_id) = self.remote_id() {
3258 let rpc = self.client.clone();
3259 let message = request.to_proto(project_id, buffer);
3260 return cx.spawn(|this, cx| async move {
3261 let response = rpc.request(message).await?;
3262 request
3263 .response_from_proto(response, this, buffer_handle, cx)
3264 .await
3265 });
3266 }
3267 Task::ready(Ok(Default::default()))
3268 }
3269
3270 pub fn find_or_create_local_worktree(
3271 &mut self,
3272 abs_path: impl AsRef<Path>,
3273 visible: bool,
3274 cx: &mut ModelContext<Self>,
3275 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3276 let abs_path = abs_path.as_ref();
3277 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3278 Task::ready(Ok((tree.clone(), relative_path.into())))
3279 } else {
3280 let worktree = self.create_local_worktree(abs_path, visible, cx);
3281 cx.foreground()
3282 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3283 }
3284 }
3285
3286 pub fn find_local_worktree(
3287 &self,
3288 abs_path: &Path,
3289 cx: &AppContext,
3290 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3291 for tree in self.worktrees(cx) {
3292 if let Some(relative_path) = tree
3293 .read(cx)
3294 .as_local()
3295 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3296 {
3297 return Some((tree.clone(), relative_path.into()));
3298 }
3299 }
3300 None
3301 }
3302
3303 pub fn is_shared(&self) -> bool {
3304 match &self.client_state {
3305 ProjectClientState::Local { is_shared, .. } => *is_shared,
3306 ProjectClientState::Remote { .. } => false,
3307 }
3308 }
3309
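    /// Creates a local worktree for the given path, reusing any load that is
    /// already in flight. Once loaded, the worktree is added to the project and,
    /// if the project has a remote id, registered or shared with the server.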
3310 fn create_local_worktree(
3311 &mut self,
3312 abs_path: impl AsRef<Path>,
3313 visible: bool,
3314 cx: &mut ModelContext<Self>,
3315 ) -> Task<Result<ModelHandle<Worktree>>> {
3316 let fs = self.fs.clone();
3317 let client = self.client.clone();
3318 let next_entry_id = self.next_entry_id.clone();
3319 let path: Arc<Path> = abs_path.as_ref().into();
3320 let task = self
3321 .loading_local_worktrees
3322 .entry(path.clone())
3323 .or_insert_with(|| {
3324 cx.spawn(|project, mut cx| {
3325 async move {
3326 let worktree = Worktree::local(
3327 client.clone(),
3328 path.clone(),
3329 visible,
3330 fs,
3331 next_entry_id,
3332 &mut cx,
3333 )
3334 .await;
3335 project.update(&mut cx, |project, _| {
3336 project.loading_local_worktrees.remove(&path);
3337 });
3338 let worktree = worktree?;
3339
3340 let (remote_project_id, is_shared) =
3341 project.update(&mut cx, |project, cx| {
3342 project.add_worktree(&worktree, cx);
3343 (project.remote_id(), project.is_shared())
3344 });
3345
3346 if let Some(project_id) = remote_project_id {
3347 if is_shared {
3348 worktree
3349 .update(&mut cx, |worktree, cx| {
3350 worktree.as_local_mut().unwrap().share(project_id, cx)
3351 })
3352 .await?;
3353 } else {
3354 worktree
3355 .update(&mut cx, |worktree, cx| {
3356 worktree.as_local_mut().unwrap().register(project_id, cx)
3357 })
3358 .await?;
3359 }
3360 }
3361
3362 Ok(worktree)
3363 }
                    .map_err(Arc::new)
3365 })
3366 .shared()
3367 })
3368 .clone();
3369 cx.foreground().spawn(async move {
3370 match task.await {
3371 Ok(worktree) => Ok(worktree),
3372 Err(err) => Err(anyhow!("{}", err)),
3373 }
3374 })
3375 }
3376
3377 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3378 self.worktrees.retain(|worktree| {
3379 worktree
3380 .upgrade(cx)
3381 .map_or(false, |w| w.read(cx).id() != id)
3382 });
3383 cx.notify();
3384 }
3385
3386 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3387 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3388 if worktree.read(cx).is_local() {
3389 cx.subscribe(&worktree, |this, worktree, _, cx| {
3390 this.update_local_worktree_buffers(worktree, cx);
3391 })
3392 .detach();
3393 }
3394
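        // Hold shared, visible, or remote worktrees strongly; otherwise keep only
        // a weak handle so the worktree can be released once nothing else uses it.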
3395 let push_strong_handle = {
3396 let worktree = worktree.read(cx);
3397 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3398 };
3399 if push_strong_handle {
3400 self.worktrees
3401 .push(WorktreeHandle::Strong(worktree.clone()));
3402 } else {
3403 cx.observe_release(&worktree, |this, _, cx| {
3404 this.worktrees
3405 .retain(|worktree| worktree.upgrade(cx).is_some());
3406 cx.notify();
3407 })
3408 .detach();
3409 self.worktrees
3410 .push(WorktreeHandle::Weak(worktree.downgrade()));
3411 }
3412 cx.notify();
3413 }
3414
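    /// Called when a local worktree updates: re-resolves the file for every open
    /// buffer in that worktree (by entry id, then by path, falling back to a
    /// deleted placeholder), notifies collaborators of the new file metadata, and
    /// re-registers renamed buffers with their language servers.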
3415 fn update_local_worktree_buffers(
3416 &mut self,
3417 worktree_handle: ModelHandle<Worktree>,
3418 cx: &mut ModelContext<Self>,
3419 ) {
3420 let snapshot = worktree_handle.read(cx).snapshot();
3421 let mut buffers_to_delete = Vec::new();
3422 let mut renamed_buffers = Vec::new();
3423 for (buffer_id, buffer) in &self.opened_buffers {
3424 if let Some(buffer) = buffer.upgrade(cx) {
3425 buffer.update(cx, |buffer, cx| {
3426 if let Some(old_file) = File::from_dyn(buffer.file()) {
3427 if old_file.worktree != worktree_handle {
3428 return;
3429 }
3430
3431 let new_file = if let Some(entry) = old_file
3432 .entry_id
3433 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3434 {
3435 File {
3436 is_local: true,
3437 entry_id: Some(entry.id),
3438 mtime: entry.mtime,
3439 path: entry.path.clone(),
3440 worktree: worktree_handle.clone(),
3441 }
3442 } else if let Some(entry) =
3443 snapshot.entry_for_path(old_file.path().as_ref())
3444 {
3445 File {
3446 is_local: true,
3447 entry_id: Some(entry.id),
3448 mtime: entry.mtime,
3449 path: entry.path.clone(),
3450 worktree: worktree_handle.clone(),
3451 }
3452 } else {
3453 File {
3454 is_local: true,
3455 entry_id: None,
3456 path: old_file.path().clone(),
3457 mtime: old_file.mtime(),
3458 worktree: worktree_handle.clone(),
3459 }
3460 };
3461
3462 let old_path = old_file.abs_path(cx);
3463 if new_file.abs_path(cx) != old_path {
3464 renamed_buffers.push((cx.handle(), old_path));
3465 }
3466
3467 if let Some(project_id) = self.remote_id() {
3468 self.client
3469 .send(proto::UpdateBufferFile {
3470 project_id,
3471 buffer_id: *buffer_id as u64,
3472 file: Some(new_file.to_proto()),
3473 })
3474 .log_err();
3475 }
3476 buffer.file_updated(Box::new(new_file), cx).detach();
3477 }
3478 });
3479 } else {
3480 buffers_to_delete.push(*buffer_id);
3481 }
3482 }
3483
3484 for buffer_id in buffers_to_delete {
3485 self.opened_buffers.remove(&buffer_id);
3486 }
3487
3488 for (buffer, old_path) in renamed_buffers {
3489 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3490 self.assign_language_to_buffer(&buffer, cx);
3491 self.register_buffer_with_language_server(&buffer, cx);
3492 }
3493 }
3494
3495 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3496 let new_active_entry = entry.and_then(|project_path| {
3497 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3498 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3499 Some(entry.id)
3500 });
3501 if new_active_entry != self.active_entry {
3502 self.active_entry = new_active_entry;
3503 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3504 }
3505 }
3506
3507 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3508 self.language_servers_with_diagnostics_running > 0
3509 }
3510
3511 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3512 let mut summary = DiagnosticSummary::default();
3513 for (_, path_summary) in self.diagnostic_summaries(cx) {
3514 summary.error_count += path_summary.error_count;
3515 summary.warning_count += path_summary.warning_count;
3516 summary.info_count += path_summary.info_count;
3517 summary.hint_count += path_summary.hint_count;
3518 }
3519 summary
3520 }
3521
3522 pub fn diagnostic_summaries<'a>(
3523 &'a self,
3524 cx: &'a AppContext,
3525 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3526 self.worktrees(cx).flat_map(move |worktree| {
3527 let worktree = worktree.read(cx);
3528 let worktree_id = worktree.id();
3529 worktree
3530 .diagnostic_summaries()
3531 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3532 })
3533 }
3534
3535 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3536 self.language_servers_with_diagnostics_running += 1;
3537 if self.language_servers_with_diagnostics_running == 1 {
3538 cx.emit(Event::DiskBasedDiagnosticsStarted);
3539 }
3540 }
3541
3542 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3543 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3544 self.language_servers_with_diagnostics_running -= 1;
3545 if self.language_servers_with_diagnostics_running == 0 {
3546 cx.emit(Event::DiskBasedDiagnosticsFinished);
3547 }
3548 }
3549
3550 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3551 self.active_entry
3552 }
3553
3554 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3555 self.worktree_for_id(path.worktree_id, cx)?
3556 .read(cx)
3557 .entry_for_path(&path.path)
3558 .map(|entry| entry.id)
3559 }
3560
3561 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3562 let worktree = self.worktree_for_entry(entry_id, cx)?;
3563 let worktree = worktree.read(cx);
3564 let worktree_id = worktree.id();
3565 let path = worktree.entry_for_id(entry_id)?.path.clone();
3566 Some(ProjectPath { worktree_id, path })
3567 }
3568
3569 // RPC message handlers
3570
3571 async fn handle_unshare_project(
3572 this: ModelHandle<Self>,
3573 _: TypedEnvelope<proto::UnshareProject>,
3574 _: Arc<Client>,
3575 mut cx: AsyncAppContext,
3576 ) -> Result<()> {
3577 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3578 Ok(())
3579 }
3580
3581 async fn handle_add_collaborator(
3582 this: ModelHandle<Self>,
3583 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3584 _: Arc<Client>,
3585 mut cx: AsyncAppContext,
3586 ) -> Result<()> {
3587 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3588 let collaborator = envelope
3589 .payload
3590 .collaborator
3591 .take()
3592 .ok_or_else(|| anyhow!("empty collaborator"))?;
3593
3594 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3595 this.update(&mut cx, |this, cx| {
3596 this.collaborators
3597 .insert(collaborator.peer_id, collaborator);
3598 cx.notify();
3599 });
3600
3601 Ok(())
3602 }
3603
3604 async fn handle_remove_collaborator(
3605 this: ModelHandle<Self>,
3606 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3607 _: Arc<Client>,
3608 mut cx: AsyncAppContext,
3609 ) -> Result<()> {
3610 this.update(&mut cx, |this, cx| {
3611 let peer_id = PeerId(envelope.payload.peer_id);
3612 let replica_id = this
3613 .collaborators
3614 .remove(&peer_id)
3615 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3616 .replica_id;
3617 for (_, buffer) in &this.opened_buffers {
3618 if let Some(buffer) = buffer.upgrade(cx) {
3619 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3620 }
3621 }
3622 cx.emit(Event::CollaboratorLeft(peer_id));
3623 cx.notify();
3624 Ok(())
3625 })
3626 }
3627
3628 async fn handle_register_worktree(
3629 this: ModelHandle<Self>,
3630 envelope: TypedEnvelope<proto::RegisterWorktree>,
3631 client: Arc<Client>,
3632 mut cx: AsyncAppContext,
3633 ) -> Result<()> {
3634 this.update(&mut cx, |this, cx| {
3635 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3636 let replica_id = this.replica_id();
3637 let worktree = proto::Worktree {
3638 id: envelope.payload.worktree_id,
3639 root_name: envelope.payload.root_name,
3640 entries: Default::default(),
3641 diagnostic_summaries: Default::default(),
3642 visible: envelope.payload.visible,
3643 };
3644 let (worktree, load_task) =
3645 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3646 this.add_worktree(&worktree, cx);
3647 load_task.detach();
3648 Ok(())
3649 })
3650 }
3651
3652 async fn handle_unregister_worktree(
3653 this: ModelHandle<Self>,
3654 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3655 _: Arc<Client>,
3656 mut cx: AsyncAppContext,
3657 ) -> Result<()> {
3658 this.update(&mut cx, |this, cx| {
3659 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3660 this.remove_worktree(worktree_id, cx);
3661 Ok(())
3662 })
3663 }
3664
3665 async fn handle_update_worktree(
3666 this: ModelHandle<Self>,
3667 envelope: TypedEnvelope<proto::UpdateWorktree>,
3668 _: Arc<Client>,
3669 mut cx: AsyncAppContext,
3670 ) -> Result<()> {
3671 this.update(&mut cx, |this, cx| {
3672 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3673 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3674 worktree.update(cx, |worktree, _| {
3675 let worktree = worktree.as_remote_mut().unwrap();
3676 worktree.update_from_remote(envelope)
3677 })?;
3678 }
3679 Ok(())
3680 })
3681 }
3682
3683 async fn handle_update_diagnostic_summary(
3684 this: ModelHandle<Self>,
3685 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3686 _: Arc<Client>,
3687 mut cx: AsyncAppContext,
3688 ) -> Result<()> {
3689 this.update(&mut cx, |this, cx| {
3690 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3691 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3692 if let Some(summary) = envelope.payload.summary {
3693 let project_path = ProjectPath {
3694 worktree_id,
3695 path: Path::new(&summary.path).into(),
3696 };
3697 worktree.update(cx, |worktree, _| {
3698 worktree
3699 .as_remote_mut()
3700 .unwrap()
3701 .update_diagnostic_summary(project_path.path.clone(), &summary);
3702 });
3703 cx.emit(Event::DiagnosticsUpdated(project_path));
3704 }
3705 }
3706 Ok(())
3707 })
3708 }
3709
3710 async fn handle_start_language_server(
3711 this: ModelHandle<Self>,
3712 envelope: TypedEnvelope<proto::StartLanguageServer>,
3713 _: Arc<Client>,
3714 mut cx: AsyncAppContext,
3715 ) -> Result<()> {
3716 let server = envelope
3717 .payload
3718 .server
3719 .ok_or_else(|| anyhow!("invalid server"))?;
3720 this.update(&mut cx, |this, cx| {
3721 this.language_server_statuses.insert(
3722 server.id as usize,
3723 LanguageServerStatus {
3724 name: server.name,
3725 pending_work: Default::default(),
3726 pending_diagnostic_updates: 0,
3727 },
3728 );
3729 cx.notify();
3730 });
3731 Ok(())
3732 }
3733
3734 async fn handle_update_language_server(
3735 this: ModelHandle<Self>,
3736 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3737 _: Arc<Client>,
3738 mut cx: AsyncAppContext,
3739 ) -> Result<()> {
3740 let language_server_id = envelope.payload.language_server_id as usize;
3741 match envelope
3742 .payload
3743 .variant
3744 .ok_or_else(|| anyhow!("invalid variant"))?
3745 {
3746 proto::update_language_server::Variant::WorkStart(payload) => {
3747 this.update(&mut cx, |this, cx| {
3748 this.on_lsp_work_start(language_server_id, payload.token, cx);
3749 })
3750 }
3751 proto::update_language_server::Variant::WorkProgress(payload) => {
3752 this.update(&mut cx, |this, cx| {
3753 this.on_lsp_work_progress(
3754 language_server_id,
3755 payload.token,
3756 LanguageServerProgress {
3757 message: payload.message,
3758 percentage: payload.percentage.map(|p| p as usize),
3759 last_update_at: Instant::now(),
3760 },
3761 cx,
3762 );
3763 })
3764 }
3765 proto::update_language_server::Variant::WorkEnd(payload) => {
3766 this.update(&mut cx, |this, cx| {
3767 this.on_lsp_work_end(language_server_id, payload.token, cx);
3768 })
3769 }
3770 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3771 this.update(&mut cx, |this, cx| {
3772 this.disk_based_diagnostics_started(cx);
3773 })
3774 }
3775 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3776 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3777 }
3778 }
3779
3780 Ok(())
3781 }
3782
3783 async fn handle_update_buffer(
3784 this: ModelHandle<Self>,
3785 envelope: TypedEnvelope<proto::UpdateBuffer>,
3786 _: Arc<Client>,
3787 mut cx: AsyncAppContext,
3788 ) -> Result<()> {
3789 this.update(&mut cx, |this, cx| {
3790 let payload = envelope.payload.clone();
3791 let buffer_id = payload.buffer_id;
3792 let ops = payload
3793 .operations
3794 .into_iter()
                .map(language::proto::deserialize_operation)
3796 .collect::<Result<Vec<_>, _>>()?;
3797 match this.opened_buffers.entry(buffer_id) {
3798 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3799 OpenBuffer::Strong(buffer) => {
3800 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3801 }
3802 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3803 OpenBuffer::Weak(_) => {}
3804 },
3805 hash_map::Entry::Vacant(e) => {
3806 e.insert(OpenBuffer::Loading(ops));
3807 }
3808 }
3809 Ok(())
3810 })
3811 }
3812
3813 async fn handle_update_buffer_file(
3814 this: ModelHandle<Self>,
3815 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3816 _: Arc<Client>,
3817 mut cx: AsyncAppContext,
3818 ) -> Result<()> {
3819 this.update(&mut cx, |this, cx| {
3820 let payload = envelope.payload.clone();
3821 let buffer_id = payload.buffer_id;
3822 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3823 let worktree = this
3824 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3825 .ok_or_else(|| anyhow!("no such worktree"))?;
3826 let file = File::from_proto(file, worktree.clone(), cx)?;
3827 let buffer = this
3828 .opened_buffers
3829 .get_mut(&buffer_id)
3830 .and_then(|b| b.upgrade(cx))
3831 .ok_or_else(|| anyhow!("no such buffer"))?;
3832 buffer.update(cx, |buffer, cx| {
3833 buffer.file_updated(Box::new(file), cx).detach();
3834 });
3835 Ok(())
3836 })
3837 }
3838
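    // Saves a buffer on behalf of a remote peer, first waiting for the buffer to
    // catch up with the version the peer asked to save.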
3839 async fn handle_save_buffer(
3840 this: ModelHandle<Self>,
3841 envelope: TypedEnvelope<proto::SaveBuffer>,
3842 _: Arc<Client>,
3843 mut cx: AsyncAppContext,
3844 ) -> Result<proto::BufferSaved> {
3845 let buffer_id = envelope.payload.buffer_id;
3846 let requested_version = deserialize_version(envelope.payload.version);
3847
3848 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3849 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3850 let buffer = this
3851 .opened_buffers
3852 .get(&buffer_id)
3853 .map(|buffer| buffer.upgrade(cx).unwrap())
3854 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3855 Ok::<_, anyhow::Error>((project_id, buffer))
3856 })?;
3857 buffer
3858 .update(&mut cx, |buffer, _| {
3859 buffer.wait_for_version(requested_version)
3860 })
3861 .await;
3862
3863 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3864 Ok(proto::BufferSaved {
3865 project_id,
3866 buffer_id,
3867 version: serialize_version(&saved_version),
3868 mtime: Some(mtime.into()),
3869 })
3870 }
3871
3872 async fn handle_reload_buffers(
3873 this: ModelHandle<Self>,
3874 envelope: TypedEnvelope<proto::ReloadBuffers>,
3875 _: Arc<Client>,
3876 mut cx: AsyncAppContext,
3877 ) -> Result<proto::ReloadBuffersResponse> {
3878 let sender_id = envelope.original_sender_id()?;
3879 let reload = this.update(&mut cx, |this, cx| {
3880 let mut buffers = HashSet::default();
3881 for buffer_id in &envelope.payload.buffer_ids {
3882 buffers.insert(
3883 this.opened_buffers
3884 .get(buffer_id)
3885 .map(|buffer| buffer.upgrade(cx).unwrap())
3886 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3887 );
3888 }
3889 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3890 })?;
3891
3892 let project_transaction = reload.await?;
3893 let project_transaction = this.update(&mut cx, |this, cx| {
3894 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3895 });
3896 Ok(proto::ReloadBuffersResponse {
3897 transaction: Some(project_transaction),
3898 })
3899 }
3900
3901 async fn handle_format_buffers(
3902 this: ModelHandle<Self>,
3903 envelope: TypedEnvelope<proto::FormatBuffers>,
3904 _: Arc<Client>,
3905 mut cx: AsyncAppContext,
3906 ) -> Result<proto::FormatBuffersResponse> {
3907 let sender_id = envelope.original_sender_id()?;
3908 let format = this.update(&mut cx, |this, cx| {
3909 let mut buffers = HashSet::default();
3910 for buffer_id in &envelope.payload.buffer_ids {
3911 buffers.insert(
3912 this.opened_buffers
3913 .get(buffer_id)
3914 .map(|buffer| buffer.upgrade(cx).unwrap())
3915 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3916 );
3917 }
3918 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3919 })?;
3920
3921 let project_transaction = format.await?;
3922 let project_transaction = this.update(&mut cx, |this, cx| {
3923 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3924 });
3925 Ok(proto::FormatBuffersResponse {
3926 transaction: Some(project_transaction),
3927 })
3928 }
3929
3930 async fn handle_get_completions(
3931 this: ModelHandle<Self>,
3932 envelope: TypedEnvelope<proto::GetCompletions>,
3933 _: Arc<Client>,
3934 mut cx: AsyncAppContext,
3935 ) -> Result<proto::GetCompletionsResponse> {
3936 let position = envelope
3937 .payload
3938 .position
3939 .and_then(language::proto::deserialize_anchor)
3940 .ok_or_else(|| anyhow!("invalid position"))?;
3941 let version = deserialize_version(envelope.payload.version);
3942 let buffer = this.read_with(&cx, |this, cx| {
3943 this.opened_buffers
3944 .get(&envelope.payload.buffer_id)
3945 .map(|buffer| buffer.upgrade(cx).unwrap())
3946 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3947 })?;
3948 buffer
3949 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3950 .await;
3951 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3952 let completions = this
3953 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3954 .await?;
3955
3956 Ok(proto::GetCompletionsResponse {
3957 completions: completions
3958 .iter()
3959 .map(language::proto::serialize_completion)
3960 .collect(),
3961 version: serialize_version(&version),
3962 })
3963 }
3964
3965 async fn handle_apply_additional_edits_for_completion(
3966 this: ModelHandle<Self>,
3967 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3968 _: Arc<Client>,
3969 mut cx: AsyncAppContext,
3970 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3971 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3972 let buffer = this
3973 .opened_buffers
3974 .get(&envelope.payload.buffer_id)
3975 .map(|buffer| buffer.upgrade(cx).unwrap())
3976 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3977 let language = buffer.read(cx).language();
3978 let completion = language::proto::deserialize_completion(
3979 envelope
3980 .payload
3981 .completion
3982 .ok_or_else(|| anyhow!("invalid completion"))?,
3983 language,
3984 )?;
3985 Ok::<_, anyhow::Error>(
3986 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3987 )
3988 })?;
3989
3990 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3991 transaction: apply_additional_edits
3992 .await?
3993 .as_ref()
3994 .map(language::proto::serialize_transaction),
3995 })
3996 }
3997
3998 async fn handle_get_code_actions(
3999 this: ModelHandle<Self>,
4000 envelope: TypedEnvelope<proto::GetCodeActions>,
4001 _: Arc<Client>,
4002 mut cx: AsyncAppContext,
4003 ) -> Result<proto::GetCodeActionsResponse> {
4004 let start = envelope
4005 .payload
4006 .start
4007 .and_then(language::proto::deserialize_anchor)
4008 .ok_or_else(|| anyhow!("invalid start"))?;
4009 let end = envelope
4010 .payload
4011 .end
4012 .and_then(language::proto::deserialize_anchor)
4013 .ok_or_else(|| anyhow!("invalid end"))?;
4014 let buffer = this.update(&mut cx, |this, cx| {
4015 this.opened_buffers
4016 .get(&envelope.payload.buffer_id)
4017 .map(|buffer| buffer.upgrade(cx).unwrap())
4018 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4019 })?;
4020 buffer
4021 .update(&mut cx, |buffer, _| {
4022 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4023 })
4024 .await;
4025
4026 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4027 let code_actions = this.update(&mut cx, |this, cx| {
4028 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4029 })?;
4030
4031 Ok(proto::GetCodeActionsResponse {
4032 actions: code_actions
4033 .await?
4034 .iter()
4035 .map(language::proto::serialize_code_action)
4036 .collect(),
4037 version: serialize_version(&version),
4038 })
4039 }
4040
4041 async fn handle_apply_code_action(
4042 this: ModelHandle<Self>,
4043 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4044 _: Arc<Client>,
4045 mut cx: AsyncAppContext,
4046 ) -> Result<proto::ApplyCodeActionResponse> {
4047 let sender_id = envelope.original_sender_id()?;
4048 let action = language::proto::deserialize_code_action(
4049 envelope
4050 .payload
4051 .action
4052 .ok_or_else(|| anyhow!("invalid action"))?,
4053 )?;
4054 let apply_code_action = this.update(&mut cx, |this, cx| {
4055 let buffer = this
4056 .opened_buffers
4057 .get(&envelope.payload.buffer_id)
4058 .map(|buffer| buffer.upgrade(cx).unwrap())
4059 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4060 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4061 })?;
4062
4063 let project_transaction = apply_code_action.await?;
4064 let project_transaction = this.update(&mut cx, |this, cx| {
4065 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4066 });
4067 Ok(proto::ApplyCodeActionResponse {
4068 transaction: Some(project_transaction),
4069 })
4070 }
4071
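    // Generic handler for buffer-scoped LSP requests proxied from a peer:
    // deserialize the request, run it against the local language server, and
    // serialize the response back for the requesting peer.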
4072 async fn handle_lsp_command<T: LspCommand>(
4073 this: ModelHandle<Self>,
4074 envelope: TypedEnvelope<T::ProtoRequest>,
4075 _: Arc<Client>,
4076 mut cx: AsyncAppContext,
4077 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4078 where
4079 <T::LspRequest as lsp::request::Request>::Result: Send,
4080 {
4081 let sender_id = envelope.original_sender_id()?;
4082 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4083 let buffer_handle = this.read_with(&cx, |this, _| {
4084 this.opened_buffers
4085 .get(&buffer_id)
4086 .and_then(|buffer| buffer.upgrade(&cx))
4087 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4088 })?;
4089 let request = T::from_proto(
4090 envelope.payload,
4091 this.clone(),
4092 buffer_handle.clone(),
4093 cx.clone(),
4094 )
4095 .await?;
4096 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4097 let response = this
4098 .update(&mut cx, |this, cx| {
4099 this.request_lsp(buffer_handle, request, cx)
4100 })
4101 .await?;
4102 this.update(&mut cx, |this, cx| {
4103 Ok(T::response_to_proto(
4104 response,
4105 this,
4106 sender_id,
4107 &buffer_version,
4108 cx,
4109 ))
4110 })
4111 }
4112
4113 async fn handle_get_project_symbols(
4114 this: ModelHandle<Self>,
4115 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<proto::GetProjectSymbolsResponse> {
4119 let symbols = this
4120 .update(&mut cx, |this, cx| {
4121 this.symbols(&envelope.payload.query, cx)
4122 })
4123 .await?;
4124
4125 Ok(proto::GetProjectSymbolsResponse {
4126 symbols: symbols.iter().map(serialize_symbol).collect(),
4127 })
4128 }
4129
4130 async fn handle_search_project(
4131 this: ModelHandle<Self>,
4132 envelope: TypedEnvelope<proto::SearchProject>,
4133 _: Arc<Client>,
4134 mut cx: AsyncAppContext,
4135 ) -> Result<proto::SearchProjectResponse> {
4136 let peer_id = envelope.original_sender_id()?;
4137 let query = SearchQuery::from_proto(envelope.payload)?;
4138 let result = this
4139 .update(&mut cx, |this, cx| this.search(query, cx))
4140 .await?;
4141
4142 this.update(&mut cx, |this, cx| {
4143 let mut locations = Vec::new();
4144 for (buffer, ranges) in result {
4145 for range in ranges {
4146 let start = serialize_anchor(&range.start);
4147 let end = serialize_anchor(&range.end);
4148 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4149 locations.push(proto::Location {
4150 buffer: Some(buffer),
4151 start: Some(start),
4152 end: Some(end),
4153 });
4154 }
4155 }
4156 Ok(proto::SearchProjectResponse { locations })
4157 })
4158 }
4159
4160 async fn handle_open_buffer_for_symbol(
4161 this: ModelHandle<Self>,
4162 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4163 _: Arc<Client>,
4164 mut cx: AsyncAppContext,
4165 ) -> Result<proto::OpenBufferForSymbolResponse> {
4166 let peer_id = envelope.original_sender_id()?;
4167 let symbol = envelope
4168 .payload
4169 .symbol
4170 .ok_or_else(|| anyhow!("invalid symbol"))?;
4171 let symbol = this.read_with(&cx, |this, _| {
4172 let symbol = this.deserialize_symbol(symbol)?;
4173 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4174 if signature == symbol.signature {
4175 Ok(symbol)
4176 } else {
4177 Err(anyhow!("invalid symbol signature"))
4178 }
4179 })?;
4180 let buffer = this
4181 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4182 .await?;
4183
4184 Ok(proto::OpenBufferForSymbolResponse {
4185 buffer: Some(this.update(&mut cx, |this, cx| {
4186 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4187 })),
4188 })
4189 }
4190
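    // Computes a SHA-256 signature over the worktree id, the symbol's path, and a
    // per-project nonce. Peers echo this signature back when opening a symbol,
    // which lets us verify that the path originally came from this project.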
4191 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4192 let mut hasher = Sha256::new();
4193 hasher.update(worktree_id.to_proto().to_be_bytes());
4194 hasher.update(path.to_string_lossy().as_bytes());
4195 hasher.update(self.nonce.to_be_bytes());
4196 hasher.finalize().as_slice().try_into().unwrap()
4197 }
4198
4199 async fn handle_open_buffer_by_id(
4200 this: ModelHandle<Self>,
4201 envelope: TypedEnvelope<proto::OpenBufferById>,
4202 _: Arc<Client>,
4203 mut cx: AsyncAppContext,
4204 ) -> Result<proto::OpenBufferResponse> {
4205 let peer_id = envelope.original_sender_id()?;
4206 let buffer = this
4207 .update(&mut cx, |this, cx| {
4208 this.open_buffer_by_id(envelope.payload.id, cx)
4209 })
4210 .await?;
4211 this.update(&mut cx, |this, cx| {
4212 Ok(proto::OpenBufferResponse {
4213 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4214 })
4215 })
4216 }
4217
4218 async fn handle_open_buffer_by_path(
4219 this: ModelHandle<Self>,
4220 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4221 _: Arc<Client>,
4222 mut cx: AsyncAppContext,
4223 ) -> Result<proto::OpenBufferResponse> {
4224 let peer_id = envelope.original_sender_id()?;
4225 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4226 let open_buffer = this.update(&mut cx, |this, cx| {
4227 this.open_buffer(
4228 ProjectPath {
4229 worktree_id,
4230 path: PathBuf::from(envelope.payload.path).into(),
4231 },
4232 cx,
4233 )
4234 });
4235
4236 let buffer = open_buffer.await?;
4237 this.update(&mut cx, |this, cx| {
4238 Ok(proto::OpenBufferResponse {
4239 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4240 })
4241 })
4242 }
4243
4244 fn serialize_project_transaction_for_peer(
4245 &mut self,
4246 project_transaction: ProjectTransaction,
4247 peer_id: PeerId,
4248 cx: &AppContext,
4249 ) -> proto::ProjectTransaction {
4250 let mut serialized_transaction = proto::ProjectTransaction {
4251 buffers: Default::default(),
4252 transactions: Default::default(),
4253 };
4254 for (buffer, transaction) in project_transaction.0 {
4255 serialized_transaction
4256 .buffers
4257 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4258 serialized_transaction
4259 .transactions
4260 .push(language::proto::serialize_transaction(&transaction));
4261 }
4262 serialized_transaction
4263 }
4264
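    // Reconstructs a `ProjectTransaction` received from a peer, resolving each
    // buffer it refers to and waiting for the transaction's edits to arrive before
    // optionally pushing it onto the buffers' undo histories.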
4265 fn deserialize_project_transaction(
4266 &mut self,
4267 message: proto::ProjectTransaction,
4268 push_to_history: bool,
4269 cx: &mut ModelContext<Self>,
4270 ) -> Task<Result<ProjectTransaction>> {
4271 cx.spawn(|this, mut cx| async move {
4272 let mut project_transaction = ProjectTransaction::default();
4273 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4274 let buffer = this
4275 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4276 .await?;
4277 let transaction = language::proto::deserialize_transaction(transaction)?;
4278 project_transaction.0.insert(buffer, transaction);
4279 }
4280
4281 for (buffer, transaction) in &project_transaction.0 {
4282 buffer
4283 .update(&mut cx, |buffer, _| {
4284 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4285 })
4286 .await;
4287
4288 if push_to_history {
4289 buffer.update(&mut cx, |buffer, _| {
4290 buffer.push_transaction(transaction.clone(), Instant::now());
4291 });
4292 }
4293 }
4294
4295 Ok(project_transaction)
4296 })
4297 }
4298
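    // Serializes a buffer for a given peer: the full buffer state is sent the
    // first time the buffer is shared with that peer, and only the buffer id
    // afterwards.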
4299 fn serialize_buffer_for_peer(
4300 &mut self,
4301 buffer: &ModelHandle<Buffer>,
4302 peer_id: PeerId,
4303 cx: &AppContext,
4304 ) -> proto::Buffer {
4305 let buffer_id = buffer.read(cx).remote_id();
4306 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4307 if shared_buffers.insert(buffer_id) {
4308 proto::Buffer {
4309 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4310 }
4311 } else {
4312 proto::Buffer {
4313 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4314 }
4315 }
4316 }
4317
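    // Resolves a `proto::Buffer` into a buffer handle. The `Id` variant waits
    // until the corresponding buffer has been opened locally (signalled via the
    // `opened_buffer` watch); the `State` variant constructs the buffer from its
    // serialized state and registers it with the project.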
4318 fn deserialize_buffer(
4319 &mut self,
4320 buffer: proto::Buffer,
4321 cx: &mut ModelContext<Self>,
4322 ) -> Task<Result<ModelHandle<Buffer>>> {
4323 let replica_id = self.replica_id();
4324
4325 let opened_buffer_tx = self.opened_buffer.0.clone();
4326 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4327 cx.spawn(|this, mut cx| async move {
4328 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4329 proto::buffer::Variant::Id(id) => {
4330 let buffer = loop {
4331 let buffer = this.read_with(&cx, |this, cx| {
4332 this.opened_buffers
4333 .get(&id)
4334 .and_then(|buffer| buffer.upgrade(cx))
4335 });
4336 if let Some(buffer) = buffer {
4337 break buffer;
4338 }
4339 opened_buffer_rx
4340 .next()
4341 .await
4342 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4343 };
4344 Ok(buffer)
4345 }
4346 proto::buffer::Variant::State(mut buffer) => {
4347 let mut buffer_worktree = None;
4348 let mut buffer_file = None;
4349 if let Some(file) = buffer.file.take() {
4350 this.read_with(&cx, |this, cx| {
4351 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4352 let worktree =
4353 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4354 anyhow!("no worktree found for id {}", file.worktree_id)
4355 })?;
4356 buffer_file =
4357 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4358 as Box<dyn language::File>);
4359 buffer_worktree = Some(worktree);
4360 Ok::<_, anyhow::Error>(())
4361 })?;
4362 }
4363
4364 let buffer = cx.add_model(|cx| {
4365 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4366 });
4367
4368 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4369
4370 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4371 Ok(buffer)
4372 }
4373 }
4374 })
4375 }
4376
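    // Reconstructs a `Symbol` from its proto representation. Note that the symbol
    // kind is transmuted directly from the wire value, which relies on the proto
    // encoding matching the in-memory representation of the LSP symbol kind.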
4377 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4378 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4379 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4380 let start = serialized_symbol
4381 .start
4382 .ok_or_else(|| anyhow!("invalid start"))?;
4383 let end = serialized_symbol
4384 .end
4385 .ok_or_else(|| anyhow!("invalid end"))?;
4386 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4387 let path = PathBuf::from(serialized_symbol.path);
4388 let language = self.languages.select_language(&path);
4389 Ok(Symbol {
4390 source_worktree_id,
4391 worktree_id,
4392 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4393 label: language
4394 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4395 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4396 name: serialized_symbol.name,
4397 path,
4398 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4399 kind,
4400 signature: serialized_symbol
4401 .signature
4402 .try_into()
4403 .map_err(|_| anyhow!("invalid signature"))?,
4404 })
4405 }
4406
4407 async fn handle_buffer_saved(
4408 this: ModelHandle<Self>,
4409 envelope: TypedEnvelope<proto::BufferSaved>,
4410 _: Arc<Client>,
4411 mut cx: AsyncAppContext,
4412 ) -> Result<()> {
4413 let version = deserialize_version(envelope.payload.version);
4414 let mtime = envelope
4415 .payload
4416 .mtime
4417 .ok_or_else(|| anyhow!("missing mtime"))?
4418 .into();
4419
4420 this.update(&mut cx, |this, cx| {
4421 let buffer = this
4422 .opened_buffers
4423 .get(&envelope.payload.buffer_id)
4424 .and_then(|buffer| buffer.upgrade(cx));
4425 if let Some(buffer) = buffer {
4426 buffer.update(cx, |buffer, cx| {
4427 buffer.did_save(version, mtime, None, cx);
4428 });
4429 }
4430 Ok(())
4431 })
4432 }
4433
4434 async fn handle_buffer_reloaded(
4435 this: ModelHandle<Self>,
4436 envelope: TypedEnvelope<proto::BufferReloaded>,
4437 _: Arc<Client>,
4438 mut cx: AsyncAppContext,
4439 ) -> Result<()> {
4440 let payload = envelope.payload.clone();
4441 let version = deserialize_version(payload.version);
4442 let mtime = payload
4443 .mtime
4444 .ok_or_else(|| anyhow!("missing mtime"))?
4445 .into();
4446 this.update(&mut cx, |this, cx| {
4447 let buffer = this
4448 .opened_buffers
4449 .get(&payload.buffer_id)
4450 .and_then(|buffer| buffer.upgrade(cx));
4451 if let Some(buffer) = buffer {
4452 buffer.update(cx, |buffer, cx| {
4453 buffer.did_reload(version, mtime, cx);
4454 });
4455 }
4456 Ok(())
4457 })
4458 }
4459
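    // Fuzzy-matches `query` against the paths of all visible worktrees, returning
    // up to `max_results` matches. Root names are included in the candidates only
    // when the project has more than one visible worktree.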
4460 pub fn match_paths<'a>(
4461 &self,
4462 query: &'a str,
4463 include_ignored: bool,
4464 smart_case: bool,
4465 max_results: usize,
4466 cancel_flag: &'a AtomicBool,
4467 cx: &AppContext,
4468 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4469 let worktrees = self
4470 .worktrees(cx)
4471 .filter(|worktree| worktree.read(cx).is_visible())
4472 .collect::<Vec<_>>();
4473 let include_root_name = worktrees.len() > 1;
4474 let candidate_sets = worktrees
4475 .into_iter()
4476 .map(|worktree| CandidateSet {
4477 snapshot: worktree.read(cx).snapshot(),
4478 include_ignored,
4479 include_root_name,
4480 })
4481 .collect::<Vec<_>>();
4482
4483 let background = cx.background().clone();
4484 async move {
4485 fuzzy::match_paths(
4486 candidate_sets.as_slice(),
4487 query,
4488 smart_case,
4489 max_results,
4490 cancel_flag,
4491 background,
4492 )
4493 .await
4494 }
4495 }
4496
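    // Converts LSP text edits into anchor-ranged edits against the buffer snapshot
    // corresponding to the version the language server was referring to, diffing
    // multiline replacements so that anchors in unchanged regions are preserved.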
4497 fn edits_from_lsp(
4498 &mut self,
4499 buffer: &ModelHandle<Buffer>,
4500 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4501 version: Option<i32>,
4502 cx: &mut ModelContext<Self>,
4503 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4504 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4505 cx.background().spawn(async move {
4506 let snapshot = snapshot?;
4507 let mut lsp_edits = lsp_edits
4508 .into_iter()
4509 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4510 .peekable();
4511
4512 let mut edits = Vec::new();
4513 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4514 // Combine any LSP edits that are adjacent.
4515 //
4516 // Also, combine LSP edits that are separated from each other by only
4517 // a newline. This is important because for some code actions,
4518 // Rust-analyzer rewrites the entire buffer via a series of edits that
4519 // are separated by unchanged newline characters.
4520 //
4521 // In order for the diffing logic below to work properly, any edits that
4522 // cancel each other out must be combined into one.
4523 while let Some((next_range, next_text)) = lsp_edits.peek() {
4524 if next_range.start > range.end {
4525 if next_range.start.row > range.end.row + 1
4526 || next_range.start.column > 0
4527 || snapshot.clip_point_utf16(
4528 PointUtf16::new(range.end.row, u32::MAX),
4529 Bias::Left,
4530 ) > range.end
4531 {
4532 break;
4533 }
4534 new_text.push('\n');
4535 }
4536 range.end = next_range.end;
4537 new_text.push_str(&next_text);
4538 lsp_edits.next();
4539 }
4540
4541 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4542 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4543 {
4544 return Err(anyhow!("invalid edits received from language server"));
4545 }
4546
4547 // For multiline edits, perform a diff of the old and new text so that
4548 // we can identify the changes more precisely, preserving the locations
4549 // of any anchors positioned in the unchanged regions.
4550 if range.end.row > range.start.row {
4551 let mut offset = range.start.to_offset(&snapshot);
4552 let old_text = snapshot.text_for_range(range).collect::<String>();
4553
4554 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4555 let mut moved_since_edit = true;
4556 for change in diff.iter_all_changes() {
4557 let tag = change.tag();
4558 let value = change.value();
4559 match tag {
4560 ChangeTag::Equal => {
4561 offset += value.len();
4562 moved_since_edit = true;
4563 }
4564 ChangeTag::Delete => {
4565 let start = snapshot.anchor_after(offset);
4566 let end = snapshot.anchor_before(offset + value.len());
4567 if moved_since_edit {
4568 edits.push((start..end, String::new()));
4569 } else {
4570 edits.last_mut().unwrap().0.end = end;
4571 }
4572 offset += value.len();
4573 moved_since_edit = false;
4574 }
4575 ChangeTag::Insert => {
4576 if moved_since_edit {
4577 let anchor = snapshot.anchor_after(offset);
4578 edits.push((anchor.clone()..anchor, value.to_string()));
4579 } else {
4580 edits.last_mut().unwrap().1.push_str(value);
4581 }
4582 moved_since_edit = false;
4583 }
4584 }
4585 }
4586 } else if range.end == range.start {
4587 let anchor = snapshot.anchor_after(range.start);
4588 edits.push((anchor.clone()..anchor, new_text));
4589 } else {
4590 let edit_start = snapshot.anchor_after(range.start);
4591 let edit_end = snapshot.anchor_before(range.end);
4592 edits.push((edit_start..edit_end, new_text));
4593 }
4594 }
4595
4596 Ok(edits)
4597 })
4598 }
4599
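    // Returns the buffer snapshot matching the given LSP document version, pruning
    // snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the
    // requested one. With no version, the current text snapshot is returned.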
4600 fn buffer_snapshot_for_lsp_version(
4601 &mut self,
4602 buffer: &ModelHandle<Buffer>,
4603 version: Option<i32>,
4604 cx: &AppContext,
4605 ) -> Result<TextBufferSnapshot> {
4606 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4607
4608 if let Some(version) = version {
4609 let buffer_id = buffer.read(cx).remote_id();
4610 let snapshots = self
4611 .buffer_snapshots
4612 .get_mut(&buffer_id)
4613 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4614 let mut found_snapshot = None;
4615 snapshots.retain(|(snapshot_version, snapshot)| {
4616 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4617 false
4618 } else {
4619 if *snapshot_version == version {
4620 found_snapshot = Some(snapshot.clone());
4621 }
4622 true
4623 }
4624 });
4625
4626 found_snapshot.ok_or_else(|| {
4627 anyhow!(
4628 "snapshot not found for buffer {} at version {}",
4629 buffer_id,
4630 version
4631 )
4632 })
4633 } else {
4634            Ok(buffer.read(cx).text_snapshot())
4635 }
4636 }
4637
4638 fn language_server_for_buffer(
4639 &self,
4640 buffer: &Buffer,
4641 cx: &AppContext,
4642 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4643 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4644 let worktree_id = file.worktree_id(cx);
4645 self.language_servers
4646 .get(&(worktree_id, language.lsp_adapter()?.name()))
4647 } else {
4648 None
4649 }
4650 }
4651}
4652
4653impl WorktreeHandle {
4654 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4655 match self {
4656 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4657 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4658 }
4659 }
4660}
4661
4662impl OpenBuffer {
4663 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4664 match self {
4665 OpenBuffer::Strong(handle) => Some(handle.clone()),
4666 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4667 OpenBuffer::Loading(_) => None,
4668 }
4669 }
4670}
4671
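// Adapts a worktree snapshot into a candidate set for fuzzy path matching.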
4672struct CandidateSet {
4673 snapshot: Snapshot,
4674 include_ignored: bool,
4675 include_root_name: bool,
4676}
4677
4678impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4679 type Candidates = CandidateSetIter<'a>;
4680
4681 fn id(&self) -> usize {
4682 self.snapshot.id().to_usize()
4683 }
4684
4685 fn len(&self) -> usize {
4686 if self.include_ignored {
4687 self.snapshot.file_count()
4688 } else {
4689 self.snapshot.visible_file_count()
4690 }
4691 }
4692
4693 fn prefix(&self) -> Arc<str> {
4694 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4695 self.snapshot.root_name().into()
4696 } else if self.include_root_name {
4697 format!("{}/", self.snapshot.root_name()).into()
4698 } else {
4699 "".into()
4700 }
4701 }
4702
4703 fn candidates(&'a self, start: usize) -> Self::Candidates {
4704 CandidateSetIter {
4705 traversal: self.snapshot.files(self.include_ignored, start),
4706 }
4707 }
4708}
4709
4710struct CandidateSetIter<'a> {
4711 traversal: Traversal<'a>,
4712}
4713
4714impl<'a> Iterator for CandidateSetIter<'a> {
4715 type Item = PathMatchCandidate<'a>;
4716
4717 fn next(&mut self) -> Option<Self::Item> {
4718 self.traversal.next().map(|entry| {
4719 if let EntryKind::File(char_bag) = entry.kind {
4720 PathMatchCandidate {
4721 path: &entry.path,
4722 char_bag,
4723 }
4724 } else {
4725 unreachable!()
4726 }
4727 })
4728 }
4729}
4730
4731impl Entity for Project {
4732 type Event = Event;
4733
4734 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4735 match &self.client_state {
4736 ProjectClientState::Local { remote_id_rx, .. } => {
4737 if let Some(project_id) = *remote_id_rx.borrow() {
4738 self.client
4739 .send(proto::UnregisterProject { project_id })
4740 .log_err();
4741 }
4742 }
4743 ProjectClientState::Remote { remote_id, .. } => {
4744 self.client
4745 .send(proto::LeaveProject {
4746 project_id: *remote_id,
4747 })
4748 .log_err();
4749 }
4750 }
4751 }
4752
4753 fn app_will_quit(
4754 &mut self,
4755 _: &mut MutableAppContext,
4756 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4757 let shutdown_futures = self
4758 .language_servers
4759 .drain()
4760 .filter_map(|(_, (_, server))| server.shutdown())
4761 .collect::<Vec<_>>();
4762 Some(
4763 async move {
4764 futures::future::join_all(shutdown_futures).await;
4765 }
4766 .boxed(),
4767 )
4768 }
4769}
4770
4771impl Collaborator {
4772 fn from_proto(
4773 message: proto::Collaborator,
4774 user_store: &ModelHandle<UserStore>,
4775 cx: &mut AsyncAppContext,
4776 ) -> impl Future<Output = Result<Self>> {
4777 let user = user_store.update(cx, |user_store, cx| {
4778 user_store.fetch_user(message.user_id, cx)
4779 });
4780
4781 async move {
4782 Ok(Self {
4783 peer_id: PeerId(message.peer_id),
4784 user: user.await?,
4785 replica_id: message.replica_id as ReplicaId,
4786 })
4787 }
4788 }
4789}
4790
4791impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4792 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4793 Self {
4794 worktree_id,
4795 path: path.as_ref().into(),
4796 }
4797 }
4798}
4799
4800impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4801 fn from(options: lsp::CreateFileOptions) -> Self {
4802 Self {
4803 overwrite: options.overwrite.unwrap_or(false),
4804 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4805 }
4806 }
4807}
4808
4809impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4810 fn from(options: lsp::RenameFileOptions) -> Self {
4811 Self {
4812 overwrite: options.overwrite.unwrap_or(false),
4813 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4814 }
4815 }
4816}
4817
4818impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4819 fn from(options: lsp::DeleteFileOptions) -> Self {
4820 Self {
4821 recursive: options.recursive.unwrap_or(false),
4822 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4823 }
4824 }
4825}
4826
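// Converts a `Symbol` into its proto representation; the kind is transmuted to
// the wire value, mirroring the conversion in `deserialize_symbol`.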
4827fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4828 proto::Symbol {
4829 source_worktree_id: symbol.source_worktree_id.to_proto(),
4830 worktree_id: symbol.worktree_id.to_proto(),
4831 language_server_name: symbol.language_server_name.0.to_string(),
4832 name: symbol.name.clone(),
4833 kind: unsafe { mem::transmute(symbol.kind) },
4834 path: symbol.path.to_string_lossy().to_string(),
4835 start: Some(proto::Point {
4836 row: symbol.range.start.row,
4837 column: symbol.range.start.column,
4838 }),
4839 end: Some(proto::Point {
4840 row: symbol.range.end.row,
4841 column: symbol.range.end.column,
4842 }),
4843 signature: symbol.signature.to_vec(),
4844 }
4845}
4846
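// Computes the relative path from `base` to `path`, inserting `..` components
// where the two diverge.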
4847fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4848 let mut path_components = path.components();
4849 let mut base_components = base.components();
4850 let mut components: Vec<Component> = Vec::new();
4851 loop {
4852 match (path_components.next(), base_components.next()) {
4853 (None, None) => break,
4854 (Some(a), None) => {
4855 components.push(a);
4856 components.extend(path_components.by_ref());
4857 break;
4858 }
4859 (None, _) => components.push(Component::ParentDir),
4860 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4861 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4862 (Some(a), Some(_)) => {
4863 components.push(Component::ParentDir);
4864 for _ in base_components {
4865 components.push(Component::ParentDir);
4866 }
4867 components.push(a);
4868 components.extend(path_components.by_ref());
4869 break;
4870 }
4871 }
4872 }
4873 components.iter().map(|c| c.as_os_str()).collect()
4874}
4875
4876impl Item for Buffer {
4877 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4878 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4879 }
4880}
4881
4882#[cfg(test)]
4883mod tests {
4884 use super::{Event, *};
4885 use fs::RealFs;
4886 use futures::{future, StreamExt};
4887 use gpui::test::subscribe;
4888 use language::{
4889 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4890 ToPoint,
4891 };
4892 use lsp::Url;
4893 use serde_json::json;
4894 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4895 use unindent::Unindent as _;
4896 use util::{assert_set_eq, test::temp_tree};
4897 use worktree::WorktreeHandle as _;
4898
4899 #[gpui::test]
4900 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4901 let dir = temp_tree(json!({
4902 "root": {
4903 "apple": "",
4904 "banana": {
4905 "carrot": {
4906 "date": "",
4907 "endive": "",
4908 }
4909 },
4910 "fennel": {
4911 "grape": "",
4912 }
4913 }
4914 }));
4915
4916 let root_link_path = dir.path().join("root_link");
4917 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4918 unix::fs::symlink(
4919 &dir.path().join("root/fennel"),
4920 &dir.path().join("root/finnochio"),
4921 )
4922 .unwrap();
4923
4924 let project = Project::test(Arc::new(RealFs), cx);
4925
4926 let (tree, _) = project
4927 .update(cx, |project, cx| {
4928 project.find_or_create_local_worktree(&root_link_path, true, cx)
4929 })
4930 .await
4931 .unwrap();
4932
4933 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4934 .await;
4935 cx.read(|cx| {
4936 let tree = tree.read(cx);
4937 assert_eq!(tree.file_count(), 5);
4938 assert_eq!(
4939 tree.inode_for_path("fennel/grape"),
4940 tree.inode_for_path("finnochio/grape")
4941 );
4942 });
4943
4944 let cancel_flag = Default::default();
4945 let results = project
4946 .read_with(cx, |project, cx| {
4947 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4948 })
4949 .await;
4950 assert_eq!(
4951 results
4952 .into_iter()
4953 .map(|result| result.path)
4954 .collect::<Vec<Arc<Path>>>(),
4955 vec![
4956 PathBuf::from("banana/carrot/date").into(),
4957 PathBuf::from("banana/carrot/endive").into(),
4958 ]
4959 );
4960 }
4961
4962 #[gpui::test]
4963 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4964 cx.foreground().forbid_parking();
4965
4966 let mut rust_language = Language::new(
4967 LanguageConfig {
4968 name: "Rust".into(),
4969 path_suffixes: vec!["rs".to_string()],
4970 ..Default::default()
4971 },
4972 Some(tree_sitter_rust::language()),
4973 );
4974 let mut json_language = Language::new(
4975 LanguageConfig {
4976 name: "JSON".into(),
4977 path_suffixes: vec!["json".to_string()],
4978 ..Default::default()
4979 },
4980 None,
4981 );
4982 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4983 name: "the-rust-language-server",
4984 capabilities: lsp::ServerCapabilities {
4985 completion_provider: Some(lsp::CompletionOptions {
4986 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4987 ..Default::default()
4988 }),
4989 ..Default::default()
4990 },
4991 ..Default::default()
4992 });
4993 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4994 name: "the-json-language-server",
4995 capabilities: lsp::ServerCapabilities {
4996 completion_provider: Some(lsp::CompletionOptions {
4997 trigger_characters: Some(vec![":".to_string()]),
4998 ..Default::default()
4999 }),
5000 ..Default::default()
5001 },
5002 ..Default::default()
5003 });
5004
5005 let fs = FakeFs::new(cx.background());
5006 fs.insert_tree(
5007 "/the-root",
5008 json!({
5009 "test.rs": "const A: i32 = 1;",
5010 "test2.rs": "",
5011 "Cargo.toml": "a = 1",
5012 "package.json": "{\"a\": 1}",
5013 }),
5014 )
5015 .await;
5016
5017 let project = Project::test(fs.clone(), cx);
5018 project.update(cx, |project, _| {
5019 project.languages.add(Arc::new(rust_language));
5020 project.languages.add(Arc::new(json_language));
5021 });
5022
5023 let worktree_id = project
5024 .update(cx, |project, cx| {
5025 project.find_or_create_local_worktree("/the-root", true, cx)
5026 })
5027 .await
5028 .unwrap()
5029 .0
5030 .read_with(cx, |tree, _| tree.id());
5031
5032 // Open a buffer without an associated language server.
5033 let toml_buffer = project
5034 .update(cx, |project, cx| {
5035 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5036 })
5037 .await
5038 .unwrap();
5039
5040 // Open a buffer with an associated language server.
5041 let rust_buffer = project
5042 .update(cx, |project, cx| {
5043 project.open_buffer((worktree_id, "test.rs"), cx)
5044 })
5045 .await
5046 .unwrap();
5047
5048 // A server is started up, and it is notified about Rust files.
5049 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5050 assert_eq!(
5051 fake_rust_server
5052 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5053 .await
5054 .text_document,
5055 lsp::TextDocumentItem {
5056 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5057 version: 0,
5058 text: "const A: i32 = 1;".to_string(),
5059 language_id: Default::default()
5060 }
5061 );
5062
5063 // The buffer is configured based on the language server's capabilities.
5064 rust_buffer.read_with(cx, |buffer, _| {
5065 assert_eq!(
5066 buffer.completion_triggers(),
5067 &[".".to_string(), "::".to_string()]
5068 );
5069 });
5070 toml_buffer.read_with(cx, |buffer, _| {
5071 assert!(buffer.completion_triggers().is_empty());
5072 });
5073
5074 // Edit a buffer. The changes are reported to the language server.
5075 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5076 assert_eq!(
5077 fake_rust_server
5078 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5079 .await
5080 .text_document,
5081 lsp::VersionedTextDocumentIdentifier::new(
5082 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5083 1
5084 )
5085 );
5086
5087 // Open a third buffer with a different associated language server.
5088 let json_buffer = project
5089 .update(cx, |project, cx| {
5090 project.open_buffer((worktree_id, "package.json"), cx)
5091 })
5092 .await
5093 .unwrap();
5094
5095        // A JSON language server is started up and is only notified about the JSON buffer.
5096 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5097 assert_eq!(
5098 fake_json_server
5099 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5100 .await
5101 .text_document,
5102 lsp::TextDocumentItem {
5103 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5104 version: 0,
5105 text: "{\"a\": 1}".to_string(),
5106 language_id: Default::default()
5107 }
5108 );
5109
5110 // This buffer is configured based on the second language server's
5111 // capabilities.
5112 json_buffer.read_with(cx, |buffer, _| {
5113 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5114 });
5115
5116 // When opening another buffer whose language server is already running,
5117 // it is also configured based on the existing language server's capabilities.
5118 let rust_buffer2 = project
5119 .update(cx, |project, cx| {
5120 project.open_buffer((worktree_id, "test2.rs"), cx)
5121 })
5122 .await
5123 .unwrap();
5124 rust_buffer2.read_with(cx, |buffer, _| {
5125 assert_eq!(
5126 buffer.completion_triggers(),
5127 &[".".to_string(), "::".to_string()]
5128 );
5129 });
5130
5131 // Changes are reported only to servers matching the buffer's language.
5132 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5133 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5134 assert_eq!(
5135 fake_rust_server
5136 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5137 .await
5138 .text_document,
5139 lsp::VersionedTextDocumentIdentifier::new(
5140 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5141 1
5142 )
5143 );
5144
5145 // Save notifications are reported to all servers.
5146 toml_buffer
5147 .update(cx, |buffer, cx| buffer.save(cx))
5148 .await
5149 .unwrap();
5150 assert_eq!(
5151 fake_rust_server
5152 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5153 .await
5154 .text_document,
5155 lsp::TextDocumentIdentifier::new(
5156 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5157 )
5158 );
5159 assert_eq!(
5160 fake_json_server
5161 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5162 .await
5163 .text_document,
5164 lsp::TextDocumentIdentifier::new(
5165 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5166 )
5167 );
5168
5169 // Renames are reported only to servers matching the buffer's language.
5170 fs.rename(
5171 Path::new("/the-root/test2.rs"),
5172 Path::new("/the-root/test3.rs"),
5173 Default::default(),
5174 )
5175 .await
5176 .unwrap();
5177 assert_eq!(
5178 fake_rust_server
5179 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5180 .await
5181 .text_document,
5182 lsp::TextDocumentIdentifier::new(
5183 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5184 ),
5185 );
5186 assert_eq!(
5187 fake_rust_server
5188 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5189 .await
5190 .text_document,
5191 lsp::TextDocumentItem {
5192 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5193 version: 0,
5194 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5195 language_id: Default::default()
5196 },
5197 );
5198
5199 rust_buffer2.update(cx, |buffer, cx| {
5200 buffer.update_diagnostics(
5201 DiagnosticSet::from_sorted_entries(
5202 vec![DiagnosticEntry {
5203 diagnostic: Default::default(),
5204 range: Anchor::MIN..Anchor::MAX,
5205 }],
5206 &buffer.snapshot(),
5207 ),
5208 cx,
5209 );
5210 assert_eq!(
5211 buffer
5212 .snapshot()
5213 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5214 .count(),
5215 1
5216 );
5217 });
5218
5219 // When the rename changes the extension of the file, the buffer gets closed on the old
5220 // language server and gets opened on the new one.
5221 fs.rename(
5222 Path::new("/the-root/test3.rs"),
5223 Path::new("/the-root/test3.json"),
5224 Default::default(),
5225 )
5226 .await
5227 .unwrap();
5228 assert_eq!(
5229 fake_rust_server
5230 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5231 .await
5232 .text_document,
5233 lsp::TextDocumentIdentifier::new(
5234 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5235 ),
5236 );
5237 assert_eq!(
5238 fake_json_server
5239 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5240 .await
5241 .text_document,
5242 lsp::TextDocumentItem {
5243 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5244 version: 0,
5245 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5246 language_id: Default::default()
5247 },
5248 );
5249 // We clear the diagnostics, since the language has changed.
5250 rust_buffer2.read_with(cx, |buffer, _| {
5251 assert_eq!(
5252 buffer
5253 .snapshot()
5254 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5255 .count(),
5256 0
5257 );
5258 });
5259
5260        // The renamed file's version resets after switching language servers.
5261 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5262 assert_eq!(
5263 fake_json_server
5264 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5265 .await
5266 .text_document,
5267 lsp::VersionedTextDocumentIdentifier::new(
5268 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5269 1
5270 )
5271 );
5272
5273 // Restart language servers
5274 project.update(cx, |project, cx| {
5275 project.restart_language_servers_for_buffers(
5276 vec![rust_buffer.clone(), json_buffer.clone()],
5277 cx,
5278 );
5279 });
5280
5281 let mut rust_shutdown_requests = fake_rust_server
5282 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5283 let mut json_shutdown_requests = fake_json_server
5284 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5285 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5286
5287 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5288 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5289
5290        // Ensure the Rust document is reopened in the new Rust language server
5291 assert_eq!(
5292 fake_rust_server
5293 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5294 .await
5295 .text_document,
5296 lsp::TextDocumentItem {
5297 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5298 version: 1,
5299 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5300 language_id: Default::default()
5301 }
5302 );
5303
5304        // Ensure the JSON documents are reopened in the new JSON language server
5305 assert_set_eq!(
5306 [
5307 fake_json_server
5308 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5309 .await
5310 .text_document,
5311 fake_json_server
5312 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5313 .await
5314 .text_document,
5315 ],
5316 [
5317 lsp::TextDocumentItem {
5318 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5319 version: 0,
5320 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5321 language_id: Default::default()
5322 },
5323 lsp::TextDocumentItem {
5324 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5325 version: 1,
5326 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5327 language_id: Default::default()
5328 }
5329 ]
5330 );
5331
5332 // Close notifications are reported only to servers matching the buffer's language.
5333 cx.update(|_| drop(json_buffer));
5334 let close_message = lsp::DidCloseTextDocumentParams {
5335 text_document: lsp::TextDocumentIdentifier::new(
5336 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5337 ),
5338 };
5339 assert_eq!(
5340 fake_json_server
5341 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5342 .await,
5343 close_message,
5344 );
5345 }
5346
5347 #[gpui::test]
5348 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5349 cx.foreground().forbid_parking();
5350
5351 let progress_token = "the-progress-token";
5352 let mut language = Language::new(
5353 LanguageConfig {
5354 name: "Rust".into(),
5355 path_suffixes: vec!["rs".to_string()],
5356 ..Default::default()
5357 },
5358 Some(tree_sitter_rust::language()),
5359 );
5360 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5361 disk_based_diagnostics_progress_token: Some(progress_token),
5362 disk_based_diagnostics_sources: &["disk"],
5363 ..Default::default()
5364 });
5365
5366 let fs = FakeFs::new(cx.background());
5367 fs.insert_tree(
5368 "/dir",
5369 json!({
5370 "a.rs": "fn a() { A }",
5371 "b.rs": "const y: i32 = 1",
5372 }),
5373 )
5374 .await;
5375
5376 let project = Project::test(fs, cx);
5377 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5378
5379 let (tree, _) = project
5380 .update(cx, |project, cx| {
5381 project.find_or_create_local_worktree("/dir", true, cx)
5382 })
5383 .await
5384 .unwrap();
5385 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5386
5387 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5388 .await;
5389
5390        // Cause the worktree to start the fake language server
5391 let _buffer = project
5392 .update(cx, |project, cx| {
5393 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5394 })
5395 .await
5396 .unwrap();
5397
5398 let mut events = subscribe(&project, cx);
5399
5400 let mut fake_server = fake_servers.next().await.unwrap();
5401 fake_server.start_progress(progress_token).await;
5402 assert_eq!(
5403 events.next().await.unwrap(),
5404 Event::DiskBasedDiagnosticsStarted
5405 );
5406
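        // Simulate overlapping progress for the same token. Diagnostics are only
        // reported as finished once every outstanding progress has ended.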
5407 fake_server.start_progress(progress_token).await;
5408 fake_server.end_progress(progress_token).await;
5409 fake_server.start_progress(progress_token).await;
5410
5411 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5412 lsp::PublishDiagnosticsParams {
5413 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5414 version: None,
5415 diagnostics: vec![lsp::Diagnostic {
5416 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5417 severity: Some(lsp::DiagnosticSeverity::ERROR),
5418 message: "undefined variable 'A'".to_string(),
5419 ..Default::default()
5420 }],
5421 },
5422 );
5423 assert_eq!(
5424 events.next().await.unwrap(),
5425 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5426 );
5427
5428 fake_server.end_progress(progress_token).await;
5429 fake_server.end_progress(progress_token).await;
5430 assert_eq!(
5431 events.next().await.unwrap(),
5432 Event::DiskBasedDiagnosticsUpdated
5433 );
5434 assert_eq!(
5435 events.next().await.unwrap(),
5436 Event::DiskBasedDiagnosticsFinished
5437 );
5438
5439 let buffer = project
5440 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5441 .await
5442 .unwrap();
5443
5444 buffer.read_with(cx, |buffer, _| {
5445 let snapshot = buffer.snapshot();
5446 let diagnostics = snapshot
5447 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5448 .collect::<Vec<_>>();
5449 assert_eq!(
5450 diagnostics,
5451 &[DiagnosticEntry {
5452 range: Point::new(0, 9)..Point::new(0, 10),
5453 diagnostic: Diagnostic {
5454 severity: lsp::DiagnosticSeverity::ERROR,
5455 message: "undefined variable 'A'".to_string(),
5456 group_id: 0,
5457 is_primary: true,
5458 ..Default::default()
5459 }
5460 }]
5461 )
5462 });
5463 }
5464
5465 #[gpui::test]
5466 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5467 cx.foreground().forbid_parking();
5468
5469 let mut language = Language::new(
5470 LanguageConfig {
5471 name: "Rust".into(),
5472 path_suffixes: vec!["rs".to_string()],
5473 ..Default::default()
5474 },
5475 Some(tree_sitter_rust::language()),
5476 );
5477 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5478 disk_based_diagnostics_sources: &["disk"],
5479 ..Default::default()
5480 });
5481
5482 let text = "
5483 fn a() { A }
5484 fn b() { BB }
5485 fn c() { CCC }
5486 "
5487 .unindent();
5488
5489 let fs = FakeFs::new(cx.background());
5490 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5491
5492 let project = Project::test(fs, cx);
5493 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5494
5495 let worktree_id = project
5496 .update(cx, |project, cx| {
5497 project.find_or_create_local_worktree("/dir", true, cx)
5498 })
5499 .await
5500 .unwrap()
5501 .0
5502 .read_with(cx, |tree, _| tree.id());
5503
5504 let buffer = project
5505 .update(cx, |project, cx| {
5506 project.open_buffer((worktree_id, "a.rs"), cx)
5507 })
5508 .await
5509 .unwrap();
5510
5511 let mut fake_server = fake_servers.next().await.unwrap();
5512 let open_notification = fake_server
5513 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5514 .await;
5515
5516 // Edit the buffer, moving the content down
5517 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5518 let change_notification_1 = fake_server
5519 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5520 .await;
5521 assert!(
5522 change_notification_1.text_document.version > open_notification.text_document.version
5523 );
5524
5525 // Report some diagnostics for the initial version of the buffer
5526 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5527 lsp::PublishDiagnosticsParams {
5528 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5529 version: Some(open_notification.text_document.version),
5530 diagnostics: vec![
5531 lsp::Diagnostic {
5532 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5533 severity: Some(DiagnosticSeverity::ERROR),
5534 message: "undefined variable 'A'".to_string(),
5535 source: Some("disk".to_string()),
5536 ..Default::default()
5537 },
5538 lsp::Diagnostic {
5539 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5540 severity: Some(DiagnosticSeverity::ERROR),
5541 message: "undefined variable 'BB'".to_string(),
5542 source: Some("disk".to_string()),
5543 ..Default::default()
5544 },
5545 lsp::Diagnostic {
5546 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5547 severity: Some(DiagnosticSeverity::ERROR),
5548 source: Some("disk".to_string()),
5549 message: "undefined variable 'CCC'".to_string(),
5550 ..Default::default()
5551 },
5552 ],
5553 },
5554 );
5555
5556 // The diagnostics have moved down since they were created.
5557 buffer.next_notification(cx).await;
5558 buffer.read_with(cx, |buffer, _| {
5559 assert_eq!(
5560 buffer
5561 .snapshot()
5562 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5563 .collect::<Vec<_>>(),
5564 &[
5565 DiagnosticEntry {
5566 range: Point::new(3, 9)..Point::new(3, 11),
5567 diagnostic: Diagnostic {
5568 severity: DiagnosticSeverity::ERROR,
5569 message: "undefined variable 'BB'".to_string(),
5570 is_disk_based: true,
5571 group_id: 1,
5572 is_primary: true,
5573 ..Default::default()
5574 },
5575 },
5576 DiagnosticEntry {
5577 range: Point::new(4, 9)..Point::new(4, 12),
5578 diagnostic: Diagnostic {
5579 severity: DiagnosticSeverity::ERROR,
5580 message: "undefined variable 'CCC'".to_string(),
5581 is_disk_based: true,
5582 group_id: 2,
5583 is_primary: true,
5584 ..Default::default()
5585 }
5586 }
5587 ]
5588 );
5589 assert_eq!(
5590 chunks_with_diagnostics(buffer, 0..buffer.len()),
5591 [
5592 ("\n\nfn a() { ".to_string(), None),
5593 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5594 (" }\nfn b() { ".to_string(), None),
5595 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5596 (" }\nfn c() { ".to_string(), None),
5597 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5598 (" }\n".to_string(), None),
5599 ]
5600 );
5601 assert_eq!(
5602 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5603 [
5604 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5605 (" }\nfn c() { ".to_string(), None),
5606 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5607 ]
5608 );
5609 });
5610
5611 // Ensure overlapping diagnostics are highlighted correctly.
5612 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5613 lsp::PublishDiagnosticsParams {
5614 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5615 version: Some(open_notification.text_document.version),
5616 diagnostics: vec![
5617 lsp::Diagnostic {
5618 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5619 severity: Some(DiagnosticSeverity::ERROR),
5620 message: "undefined variable 'A'".to_string(),
5621 source: Some("disk".to_string()),
5622 ..Default::default()
5623 },
5624 lsp::Diagnostic {
5625 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5626 severity: Some(DiagnosticSeverity::WARNING),
5627 message: "unreachable statement".to_string(),
5628 source: Some("disk".to_string()),
5629 ..Default::default()
5630 },
5631 ],
5632 },
5633 );
5634
5635 buffer.next_notification(cx).await;
5636 buffer.read_with(cx, |buffer, _| {
5637 assert_eq!(
5638 buffer
5639 .snapshot()
5640 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5641 .collect::<Vec<_>>(),
5642 &[
5643 DiagnosticEntry {
5644 range: Point::new(2, 9)..Point::new(2, 12),
5645 diagnostic: Diagnostic {
5646 severity: DiagnosticSeverity::WARNING,
5647 message: "unreachable statement".to_string(),
5648 is_disk_based: true,
5649 group_id: 1,
5650 is_primary: true,
5651 ..Default::default()
5652 }
5653 },
5654 DiagnosticEntry {
5655 range: Point::new(2, 9)..Point::new(2, 10),
5656 diagnostic: Diagnostic {
5657 severity: DiagnosticSeverity::ERROR,
5658 message: "undefined variable 'A'".to_string(),
5659 is_disk_based: true,
5660 group_id: 0,
5661 is_primary: true,
5662 ..Default::default()
5663 },
5664 }
5665 ]
5666 );
5667 assert_eq!(
5668 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5669 [
5670 ("fn a() { ".to_string(), None),
5671 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5672 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5673 ("\n".to_string(), None),
5674 ]
5675 );
5676 assert_eq!(
5677 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5678 [
5679 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5680 ("\n".to_string(), None),
5681 ]
5682 );
5683 });
5684
5685 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5686 // changes since the last save.
5687 buffer.update(cx, |buffer, cx| {
5688 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5689 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5690 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5691 });
5692 let change_notification_2 = fake_server
5693 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5694 .await;
5695 assert!(
5696 change_notification_2.text_document.version
5697 > change_notification_1.text_document.version
5698 );
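// Each DidChangeTextDocument notification carries an increasing document version,
// which the server can echo back in PublishDiagnostics to identify the snapshot
// its positions refer to.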
5699
5700 // Handle diagnostics that are reported out of document order.
5701 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5702 lsp::PublishDiagnosticsParams {
5703 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5704 version: Some(change_notification_2.text_document.version),
5705 diagnostics: vec![
5706 lsp::Diagnostic {
5707 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5708 severity: Some(DiagnosticSeverity::ERROR),
5709 message: "undefined variable 'BB'".to_string(),
5710 source: Some("disk".to_string()),
5711 ..Default::default()
5712 },
5713 lsp::Diagnostic {
5714 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5715 severity: Some(DiagnosticSeverity::WARNING),
5716 message: "undefined variable 'A'".to_string(),
5717 source: Some("disk".to_string()),
5718 ..Default::default()
5719 },
5720 ],
5721 },
5722 );
5723
5724 buffer.next_notification(cx).await;
5725 buffer.read_with(cx, |buffer, _| {
5726 assert_eq!(
5727 buffer
5728 .snapshot()
5729 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5730 .collect::<Vec<_>>(),
5731 &[
5732 DiagnosticEntry {
5733 range: Point::new(2, 21)..Point::new(2, 22),
5734 diagnostic: Diagnostic {
5735 severity: DiagnosticSeverity::WARNING,
5736 message: "undefined variable 'A'".to_string(),
5737 is_disk_based: true,
5738 group_id: 1,
5739 is_primary: true,
5740 ..Default::default()
5741 }
5742 },
5743 DiagnosticEntry {
5744 range: Point::new(3, 9)..Point::new(3, 14),
5745 diagnostic: Diagnostic {
5746 severity: DiagnosticSeverity::ERROR,
5747 message: "undefined variable 'BB'".to_string(),
5748 is_disk_based: true,
5749 group_id: 0,
5750 is_primary: true,
5751 ..Default::default()
5752 },
5753 }
5754 ]
5755 );
5756 });
5757 }
5758
5759 #[gpui::test]
5760 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5761 cx.foreground().forbid_parking();
5762
5763 let text = concat!(
5764 "let one = ;\n", //
5765 "let two = \n",
5766 "let three = 3;\n",
5767 );
5768
5769 let fs = FakeFs::new(cx.background());
5770 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5771
5772 let project = Project::test(fs, cx);
5773 let worktree_id = project
5774 .update(cx, |project, cx| {
5775 project.find_or_create_local_worktree("/dir", true, cx)
5776 })
5777 .await
5778 .unwrap()
5779 .0
5780 .read_with(cx, |tree, _| tree.id());
5781
5782 let buffer = project
5783 .update(cx, |project, cx| {
5784 project.open_buffer((worktree_id, "a.rs"), cx)
5785 })
5786 .await
5787 .unwrap();
5788
5789 project.update(cx, |project, cx| {
5790 project
5791 .update_buffer_diagnostics(
5792 &buffer,
5793 vec![
5794 DiagnosticEntry {
5795 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5796 diagnostic: Diagnostic {
5797 severity: DiagnosticSeverity::ERROR,
5798 message: "syntax error 1".to_string(),
5799 ..Default::default()
5800 },
5801 },
5802 DiagnosticEntry {
5803 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5804 diagnostic: Diagnostic {
5805 severity: DiagnosticSeverity::ERROR,
5806 message: "syntax error 2".to_string(),
5807 ..Default::default()
5808 },
5809 },
5810 ],
5811 None,
5812 cx,
5813 )
5814 .unwrap();
5815 });
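// Note: the diagnostic ranges use PointUtf16 because LSP positions are expressed
// in UTF-16 code units.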
5816
5817 // An empty range is extended forward to include the following character.
5818 // At the end of a line, an empty range is extended backward to include
5819 // the preceding character.
5820 buffer.read_with(cx, |buffer, _| {
5821 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5822 assert_eq!(
5823 chunks
5824 .iter()
5825 .map(|(s, d)| (s.as_str(), *d))
5826 .collect::<Vec<_>>(),
5827 &[
5828 ("let one = ", None),
5829 (";", Some(DiagnosticSeverity::ERROR)),
5830 ("\nlet two =", None),
5831 (" ", Some(DiagnosticSeverity::ERROR)),
5832 ("\nlet three = 3;\n", None)
5833 ]
5834 );
5835 });
5836 }
5837
5838 #[gpui::test]
5839 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5840 cx.foreground().forbid_parking();
5841
5842 let mut language = Language::new(
5843 LanguageConfig {
5844 name: "Rust".into(),
5845 path_suffixes: vec!["rs".to_string()],
5846 ..Default::default()
5847 },
5848 Some(tree_sitter_rust::language()),
5849 );
5850 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5851
5852 let text = "
5853 fn a() {
5854 f1();
5855 }
5856 fn b() {
5857 f2();
5858 }
5859 fn c() {
5860 f3();
5861 }
5862 "
5863 .unindent();
5864
5865 let fs = FakeFs::new(cx.background());
5866 fs.insert_tree(
5867 "/dir",
5868 json!({
5869 "a.rs": text.clone(),
5870 }),
5871 )
5872 .await;
5873
5874 let project = Project::test(fs, cx);
5875 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5876
5877 let worktree_id = project
5878 .update(cx, |project, cx| {
5879 project.find_or_create_local_worktree("/dir", true, cx)
5880 })
5881 .await
5882 .unwrap()
5883 .0
5884 .read_with(cx, |tree, _| tree.id());
5885
5886 let buffer = project
5887 .update(cx, |project, cx| {
5888 project.open_buffer((worktree_id, "a.rs"), cx)
5889 })
5890 .await
5891 .unwrap();
5892
5893 let mut fake_server = fake_servers.next().await.unwrap();
5894 let lsp_document_version = fake_server
5895 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5896 .await
5897 .text_document
5898 .version;
5899
5900 // Simulate editing the buffer after the language server computes some edits.
5901 buffer.update(cx, |buffer, cx| {
5902 buffer.edit(
5903 [Point::new(0, 0)..Point::new(0, 0)],
5904 "// above first function\n",
5905 cx,
5906 );
5907 buffer.edit(
5908 [Point::new(2, 0)..Point::new(2, 0)],
5909 " // inside first function\n",
5910 cx,
5911 );
5912 buffer.edit(
5913 [Point::new(6, 4)..Point::new(6, 4)],
5914 "// inside second function ",
5915 cx,
5916 );
5917
5918 assert_eq!(
5919 buffer.text(),
5920 "
5921 // above first function
5922 fn a() {
5923 // inside first function
5924 f1();
5925 }
5926 fn b() {
5927 // inside second function f2();
5928 }
5929 fn c() {
5930 f3();
5931 }
5932 "
5933 .unindent()
5934 );
5935 });
5936
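// Passing the server's document version makes edits_from_lsp interpret the LSP
// ranges against the snapshot that version refers to; the resulting edits are
// returned in terms of the current buffer, so they compose with the local edits
// made above (see the assertion below).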
5937 let edits = project
5938 .update(cx, |project, cx| {
5939 project.edits_from_lsp(
5940 &buffer,
5941 vec![
5942 // replace body of first function
5943 lsp::TextEdit {
5944 range: lsp::Range::new(
5945 lsp::Position::new(0, 0),
5946 lsp::Position::new(3, 0),
5947 ),
5948 new_text: "
5949 fn a() {
5950 f10();
5951 }
5952 "
5953 .unindent(),
5954 },
5955 // edit inside second function
5956 lsp::TextEdit {
5957 range: lsp::Range::new(
5958 lsp::Position::new(4, 6),
5959 lsp::Position::new(4, 6),
5960 ),
5961 new_text: "00".into(),
5962 },
5963 // edit inside third function via two distinct edits
5964 lsp::TextEdit {
5965 range: lsp::Range::new(
5966 lsp::Position::new(7, 5),
5967 lsp::Position::new(7, 5),
5968 ),
5969 new_text: "4000".into(),
5970 },
5971 lsp::TextEdit {
5972 range: lsp::Range::new(
5973 lsp::Position::new(7, 5),
5974 lsp::Position::new(7, 6),
5975 ),
5976 new_text: "".into(),
5977 },
5978 ],
5979 Some(lsp_document_version),
5980 cx,
5981 )
5982 })
5983 .await
5984 .unwrap();
5985
5986 buffer.update(cx, |buffer, cx| {
5987 for (range, new_text) in edits {
5988 buffer.edit([range], new_text, cx);
5989 }
5990 assert_eq!(
5991 buffer.text(),
5992 "
5993 // above first function
5994 fn a() {
5995 // inside first function
5996 f10();
5997 }
5998 fn b() {
5999 // inside second function f200();
6000 }
6001 fn c() {
6002 f4000();
6003 }
6004 "
6005 .unindent()
6006 );
6007 });
6008 }
6009
6010 #[gpui::test]
6011 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6012 cx.foreground().forbid_parking();
6013
6014 let text = "
6015 use a::b;
6016 use a::c;
6017
6018 fn f() {
6019 b();
6020 c();
6021 }
6022 "
6023 .unindent();
6024
6025 let fs = FakeFs::new(cx.background());
6026 fs.insert_tree(
6027 "/dir",
6028 json!({
6029 "a.rs": text.clone(),
6030 }),
6031 )
6032 .await;
6033
6034 let project = Project::test(fs, cx);
6035 let worktree_id = project
6036 .update(cx, |project, cx| {
6037 project.find_or_create_local_worktree("/dir", true, cx)
6038 })
6039 .await
6040 .unwrap()
6041 .0
6042 .read_with(cx, |tree, _| tree.id());
6043
6044 let buffer = project
6045 .update(cx, |project, cx| {
6046 project.open_buffer((worktree_id, "a.rs"), cx)
6047 })
6048 .await
6049 .unwrap();
6050
6051 // Simulate the language server sending us a small edit in the form of a very large diff.
6052 // Rust-analyzer does this when performing a merge-imports code action.
6053 let edits = project
6054 .update(cx, |project, cx| {
6055 project.edits_from_lsp(
6056 &buffer,
6057 [
6058 // Replace the first use statement without editing the semicolon.
6059 lsp::TextEdit {
6060 range: lsp::Range::new(
6061 lsp::Position::new(0, 4),
6062 lsp::Position::new(0, 8),
6063 ),
6064 new_text: "a::{b, c}".into(),
6065 },
6066 // Reinsert the remainder of the file between the semicolon and the final
6067 // newline of the file.
6068 lsp::TextEdit {
6069 range: lsp::Range::new(
6070 lsp::Position::new(0, 9),
6071 lsp::Position::new(0, 9),
6072 ),
6073 new_text: "\n\n".into(),
6074 },
6075 lsp::TextEdit {
6076 range: lsp::Range::new(
6077 lsp::Position::new(0, 9),
6078 lsp::Position::new(0, 9),
6079 ),
6080 new_text: "
6081 fn f() {
6082 b();
6083 c();
6084 }"
6085 .unindent(),
6086 },
6087 // Delete everything after the first newline of the file.
6088 lsp::TextEdit {
6089 range: lsp::Range::new(
6090 lsp::Position::new(1, 0),
6091 lsp::Position::new(7, 0),
6092 ),
6093 new_text: "".into(),
6094 },
6095 ],
6096 None,
6097 cx,
6098 )
6099 })
6100 .await
6101 .unwrap();
6102
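// edits_from_lsp appears to diff the replaced text against the replacement, so
// the large whole-file rewrite above collapses into the two minimal edits
// asserted below.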
6103 buffer.update(cx, |buffer, cx| {
6104 let edits = edits
6105 .into_iter()
6106 .map(|(range, text)| {
6107 (
6108 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6109 text,
6110 )
6111 })
6112 .collect::<Vec<_>>();
6113
6114 assert_eq!(
6115 edits,
6116 [
6117 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6118 (Point::new(1, 0)..Point::new(2, 0), "".into())
6119 ]
6120 );
6121
6122 for (range, new_text) in edits {
6123 buffer.edit([range], new_text, cx);
6124 }
6125 assert_eq!(
6126 buffer.text(),
6127 "
6128 use a::{b, c};
6129
6130 fn f() {
6131 b();
6132 c();
6133 }
6134 "
6135 .unindent()
6136 );
6137 });
6138 }
6139
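// Collects the buffer's highlighted chunks over `range`, merging adjacent chunks
// that share the same diagnostic severity into a single string.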
6140 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6141 buffer: &Buffer,
6142 range: Range<T>,
6143 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6144 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6145 for chunk in buffer.snapshot().chunks(range, true) {
6146 if chunks.last().map_or(false, |prev_chunk| {
6147 prev_chunk.1 == chunk.diagnostic_severity
6148 }) {
6149 chunks.last_mut().unwrap().0.push_str(chunk.text);
6150 } else {
6151 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6152 }
6153 }
6154 chunks
6155 }
6156
6157 #[gpui::test]
6158 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6159 let dir = temp_tree(json!({
6160 "root": {
6161 "dir1": {},
6162 "dir2": {
6163 "dir3": {}
6164 }
6165 }
6166 }));
6167
6168 let project = Project::test(Arc::new(RealFs), cx);
6169 let (tree, _) = project
6170 .update(cx, |project, cx| {
6171 project.find_or_create_local_worktree(&dir.path(), true, cx)
6172 })
6173 .await
6174 .unwrap();
6175
6176 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6177 .await;
6178
6179 let cancel_flag = Default::default();
6180 let results = project
6181 .read_with(cx, |project, cx| {
6182 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6183 })
6184 .await;
6185
6186 assert!(results.is_empty());
6187 }
6188
6189 #[gpui::test]
6190 async fn test_definition(cx: &mut gpui::TestAppContext) {
6191 let mut language = Language::new(
6192 LanguageConfig {
6193 name: "Rust".into(),
6194 path_suffixes: vec!["rs".to_string()],
6195 ..Default::default()
6196 },
6197 Some(tree_sitter_rust::language()),
6198 );
6199 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6200
6201 let fs = FakeFs::new(cx.background());
6202 fs.insert_tree(
6203 "/dir",
6204 json!({
6205 "a.rs": "const fn a() { A }",
6206 "b.rs": "const y: i32 = crate::a()",
6207 }),
6208 )
6209 .await;
6210
6211 let project = Project::test(fs, cx);
6212 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6213
6214 let (tree, _) = project
6215 .update(cx, |project, cx| {
6216 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6217 })
6218 .await
6219 .unwrap();
6220 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6221 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6222 .await;
6223
6224 let buffer = project
6225 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6226 .await
6227 .unwrap();
6228
6229 let fake_server = fake_servers.next().await.unwrap();
6230 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6231 let params = params.text_document_position_params;
6232 assert_eq!(
6233 params.text_document.uri.to_file_path().unwrap(),
6234 Path::new("/dir/b.rs"),
6235 );
6236 assert_eq!(params.position, lsp::Position::new(0, 22));
6237
6238 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6239 lsp::Location::new(
6240 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6241 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6242 ),
6243 )))
6244 });
6245
6246 let mut definitions = project
6247 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6248 .await
6249 .unwrap();
6250
6251 assert_eq!(definitions.len(), 1);
6252 let definition = definitions.pop().unwrap();
6253 cx.update(|cx| {
6254 let target_buffer = definition.buffer.read(cx);
6255 assert_eq!(
6256 target_buffer
6257 .file()
6258 .unwrap()
6259 .as_local()
6260 .unwrap()
6261 .abs_path(cx),
6262 Path::new("/dir/a.rs"),
6263 );
6264 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6265 assert_eq!(
6266 list_worktrees(&project, cx),
6267 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6268 );
6269
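// Dropping the definition releases the handle to the buffer in that invisible
// worktree; the worktree is evidently discarded once nothing references it, as
// the read below asserts.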
6270 drop(definition);
6271 });
6272 cx.read(|cx| {
6273 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6274 });
6275
6276 fn list_worktrees<'a>(
6277 project: &'a ModelHandle<Project>,
6278 cx: &'a AppContext,
6279 ) -> Vec<(&'a Path, bool)> {
6280 project
6281 .read(cx)
6282 .worktrees(cx)
6283 .map(|worktree| {
6284 let worktree = worktree.read(cx);
6285 (
6286 worktree.as_local().unwrap().abs_path().as_ref(),
6287 worktree.is_visible(),
6288 )
6289 })
6290 .collect::<Vec<_>>()
6291 }
6292 }
6293
6294 #[gpui::test(iterations = 10)]
6295 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6296 let mut language = Language::new(
6297 LanguageConfig {
6298 name: "TypeScript".into(),
6299 path_suffixes: vec!["ts".to_string()],
6300 ..Default::default()
6301 },
6302 None,
6303 );
6304 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6305
6306 let fs = FakeFs::new(cx.background());
6307 fs.insert_tree(
6308 "/dir",
6309 json!({
6310 "a.ts": "a",
6311 }),
6312 )
6313 .await;
6314
6315 let project = Project::test(fs, cx);
6316 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6317
6318 let (tree, _) = project
6319 .update(cx, |project, cx| {
6320 project.find_or_create_local_worktree("/dir", true, cx)
6321 })
6322 .await
6323 .unwrap();
6324 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6325 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6326 .await;
6327
6328 let buffer = project
6329 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6330 .await
6331 .unwrap();
6332
6333 let fake_server = fake_language_servers.next().await.unwrap();
6334
6335 // The language server returns code actions that contain commands but no edits.
6336 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6337 fake_server
6338 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6339 Ok(Some(vec![
6340 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6341 title: "The code action".into(),
6342 command: Some(lsp::Command {
6343 title: "The command".into(),
6344 command: "_the/command".into(),
6345 arguments: Some(vec![json!("the-argument")]),
6346 }),
6347 ..Default::default()
6348 }),
6349 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6350 title: "two".into(),
6351 ..Default::default()
6352 }),
6353 ]))
6354 })
6355 .next()
6356 .await;
6357
6358 let action = actions.await.unwrap()[0].clone();
6359 let apply = project.update(cx, |project, cx| {
6360 project.apply_code_action(buffer.clone(), action, true, cx)
6361 });
6362
6363 // Resolving the code action does not populate its edits. In the absence of
6364 // edits, we must execute the given command.
6365 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6366 |action, _| async move { Ok(action) },
6367 );
6368
6369 // While executing the command, the language server sends the editor
6370 // a `workspace/applyEdit` request.
6371 fake_server
6372 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6373 let fake = fake_server.clone();
6374 move |params, _| {
6375 assert_eq!(params.command, "_the/command");
6376 let fake = fake.clone();
6377 async move {
6378 fake.server
6379 .request::<lsp::request::ApplyWorkspaceEdit>(
6380 lsp::ApplyWorkspaceEditParams {
6381 label: None,
6382 edit: lsp::WorkspaceEdit {
6383 changes: Some(
6384 [(
6385 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6386 vec![lsp::TextEdit {
6387 range: lsp::Range::new(
6388 lsp::Position::new(0, 0),
6389 lsp::Position::new(0, 0),
6390 ),
6391 new_text: "X".into(),
6392 }],
6393 )]
6394 .into_iter()
6395 .collect(),
6396 ),
6397 ..Default::default()
6398 },
6399 },
6400 )
6401 .await
6402 .unwrap();
6403 Ok(Some(json!(null)))
6404 }
6405 }
6406 })
6407 .next()
6408 .await;
6409
6410 // Applying the code action returns a project transaction containing the edits
6411 // sent by the language server in its `workspace/applyEdit` request.
6412 let transaction = apply.await.unwrap();
6413 assert!(transaction.0.contains_key(&buffer));
6414 buffer.update(cx, |buffer, cx| {
6415 assert_eq!(buffer.text(), "Xa");
6416 buffer.undo(cx);
6417 assert_eq!(buffer.text(), "a");
6418 });
6419 }
6420
6421 #[gpui::test]
6422 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6423 let fs = FakeFs::new(cx.background());
6424 fs.insert_tree(
6425 "/dir",
6426 json!({
6427 "file1": "the old contents",
6428 }),
6429 )
6430 .await;
6431
6432 let project = Project::test(fs.clone(), cx);
6433 let worktree_id = project
6434 .update(cx, |p, cx| {
6435 p.find_or_create_local_worktree("/dir", true, cx)
6436 })
6437 .await
6438 .unwrap()
6439 .0
6440 .read_with(cx, |tree, _| tree.id());
6441
6442 let buffer = project
6443 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6444 .await
6445 .unwrap();
6446 buffer
6447 .update(cx, |buffer, cx| {
6448 assert_eq!(buffer.text(), "the old contents");
6449 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6450 buffer.save(cx)
6451 })
6452 .await
6453 .unwrap();
6454
6455 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6456 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6457 }
6458
6459 #[gpui::test]
6460 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6461 let fs = FakeFs::new(cx.background());
6462 fs.insert_tree(
6463 "/dir",
6464 json!({
6465 "file1": "the old contents",
6466 }),
6467 )
6468 .await;
6469
6470 let project = Project::test(fs.clone(), cx);
6471 let worktree_id = project
6472 .update(cx, |p, cx| {
6473 p.find_or_create_local_worktree("/dir/file1", true, cx)
6474 })
6475 .await
6476 .unwrap()
6477 .0
6478 .read_with(cx, |tree, _| tree.id());
6479
6480 let buffer = project
6481 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6482 .await
6483 .unwrap();
6484 buffer
6485 .update(cx, |buffer, cx| {
6486 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6487 buffer.save(cx)
6488 })
6489 .await
6490 .unwrap();
6491
6492 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6493 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6494 }
6495
6496 #[gpui::test]
6497 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6498 let fs = FakeFs::new(cx.background());
6499 fs.insert_tree("/dir", json!({})).await;
6500
6501 let project = Project::test(fs.clone(), cx);
6502 let (worktree, _) = project
6503 .update(cx, |project, cx| {
6504 project.find_or_create_local_worktree("/dir", true, cx)
6505 })
6506 .await
6507 .unwrap();
6508 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6509
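// create_buffer yields an untitled, in-memory buffer with no backing file;
// save_buffer_as below gives it one.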
6510 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6511 buffer.update(cx, |buffer, cx| {
6512 buffer.edit([0..0], "abc", cx);
6513 assert!(buffer.is_dirty());
6514 assert!(!buffer.has_conflict());
6515 });
6516 project
6517 .update(cx, |project, cx| {
6518 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6519 })
6520 .await
6521 .unwrap();
6522 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6523 buffer.read_with(cx, |buffer, cx| {
6524 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6525 assert!(!buffer.is_dirty());
6526 assert!(!buffer.has_conflict());
6527 });
6528
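// Re-opening the path returns the same buffer handle, confirming that save-as
// registered the buffer under its new path.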
6529 let opened_buffer = project
6530 .update(cx, |project, cx| {
6531 project.open_buffer((worktree_id, "file1"), cx)
6532 })
6533 .await
6534 .unwrap();
6535 assert_eq!(opened_buffer, buffer);
6536 }
6537
6538 #[gpui::test(retries = 5)]
6539 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6540 let dir = temp_tree(json!({
6541 "a": {
6542 "file1": "",
6543 "file2": "",
6544 "file3": "",
6545 },
6546 "b": {
6547 "c": {
6548 "file4": "",
6549 "file5": "",
6550 }
6551 }
6552 }));
6553
6554 let project = Project::test(Arc::new(RealFs), cx);
6555 let rpc = project.read_with(cx, |p, _| p.client.clone());
6556
6557 let (tree, _) = project
6558 .update(cx, |p, cx| {
6559 p.find_or_create_local_worktree(dir.path(), true, cx)
6560 })
6561 .await
6562 .unwrap();
6563 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6564
6565 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6566 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6567 async move { buffer.await.unwrap() }
6568 };
6569 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6570 tree.read_with(cx, |tree, _| {
6571 tree.entry_for_path(path)
6572 .expect(&format!("no entry for path {}", path))
6573 .id
6574 })
6575 };
6576
6577 let buffer2 = buffer_for_path("a/file2", cx).await;
6578 let buffer3 = buffer_for_path("a/file3", cx).await;
6579 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6580 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6581
6582 let file2_id = id_for_path("a/file2", &cx);
6583 let file3_id = id_for_path("a/file3", &cx);
6584 let file4_id = id_for_path("b/c/file4", &cx);
6585
6586 // Wait for the initial scan.
6587 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6588 .await;
6589
6590 // Create a remote copy of this worktree.
6591 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6592 let (remote, load_task) = cx.update(|cx| {
6593 Worktree::remote(
6594 1,
6595 1,
6596 initial_snapshot.to_proto(&Default::default(), true),
6597 rpc.clone(),
6598 cx,
6599 )
6600 });
6601 load_task.await;
6602
6603 cx.read(|cx| {
6604 assert!(!buffer2.read(cx).is_dirty());
6605 assert!(!buffer3.read(cx).is_dirty());
6606 assert!(!buffer4.read(cx).is_dirty());
6607 assert!(!buffer5.read(cx).is_dirty());
6608 });
6609
6610 // Rename and delete files and directories.
6611 tree.flush_fs_events(&cx).await;
6612 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6613 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6614 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6615 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6616 tree.flush_fs_events(&cx).await;
6617
6618 let expected_paths = vec![
6619 "a",
6620 "a/file1",
6621 "a/file2.new",
6622 "b",
6623 "d",
6624 "d/file3",
6625 "d/file4",
6626 ];
6627
6628 cx.read(|app| {
6629 assert_eq!(
6630 tree.read(app)
6631 .paths()
6632 .map(|p| p.to_str().unwrap())
6633 .collect::<Vec<_>>(),
6634 expected_paths
6635 );
6636
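// Entry ids are stable across renames: the moved files keep the ids captured
// before the rename.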
6637 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6638 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6639 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6640
6641 assert_eq!(
6642 buffer2.read(app).file().unwrap().path().as_ref(),
6643 Path::new("a/file2.new")
6644 );
6645 assert_eq!(
6646 buffer3.read(app).file().unwrap().path().as_ref(),
6647 Path::new("d/file3")
6648 );
6649 assert_eq!(
6650 buffer4.read(app).file().unwrap().path().as_ref(),
6651 Path::new("d/file4")
6652 );
6653 assert_eq!(
6654 buffer5.read(app).file().unwrap().path().as_ref(),
6655 Path::new("b/c/file5")
6656 );
6657
6658 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6659 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6660 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6661 assert!(buffer5.read(app).file().unwrap().is_deleted());
6662 });
6663
6664 // Update the remote worktree. Check that it becomes consistent with the
6665 // local worktree.
6666 remote.update(cx, |remote, cx| {
6667 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6668 &initial_snapshot,
6669 1,
6670 1,
6671 true,
6672 );
6673 remote
6674 .as_remote_mut()
6675 .unwrap()
6676 .snapshot
6677 .apply_remote_update(update_message)
6678 .unwrap();
6679
6680 assert_eq!(
6681 remote
6682 .paths()
6683 .map(|p| p.to_str().unwrap())
6684 .collect::<Vec<_>>(),
6685 expected_paths
6686 );
6687 });
6688 }
6689
6690 #[gpui::test]
6691 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6692 let fs = FakeFs::new(cx.background());
6693 fs.insert_tree(
6694 "/the-dir",
6695 json!({
6696 "a.txt": "a-contents",
6697 "b.txt": "b-contents",
6698 }),
6699 )
6700 .await;
6701
6702 let project = Project::test(fs.clone(), cx);
6703 let worktree_id = project
6704 .update(cx, |p, cx| {
6705 p.find_or_create_local_worktree("/the-dir", true, cx)
6706 })
6707 .await
6708 .unwrap()
6709 .0
6710 .read_with(cx, |tree, _| tree.id());
6711
6712 // Spawn multiple tasks to open paths, repeating some paths.
6713 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6714 (
6715 p.open_buffer((worktree_id, "a.txt"), cx),
6716 p.open_buffer((worktree_id, "b.txt"), cx),
6717 p.open_buffer((worktree_id, "a.txt"), cx),
6718 )
6719 });
6720
6721 let buffer_a_1 = buffer_a_1.await.unwrap();
6722 let buffer_a_2 = buffer_a_2.await.unwrap();
6723 let buffer_b = buffer_b.await.unwrap();
6724 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6725 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6726
6727 // There is only one buffer per path.
6728 let buffer_a_id = buffer_a_1.id();
6729 assert_eq!(buffer_a_2.id(), buffer_a_id);
6730
6731 // Open the same path again while it is still open.
6732 drop(buffer_a_1);
6733 let buffer_a_3 = project
6734 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6735 .await
6736 .unwrap();
6737
6738 // There's still only one buffer per path.
6739 assert_eq!(buffer_a_3.id(), buffer_a_id);
6740 }
6741
6742 #[gpui::test]
6743 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6744 use std::fs;
6745
6746 let dir = temp_tree(json!({
6747 "file1": "abc",
6748 "file2": "def",
6749 "file3": "ghi",
6750 }));
6751
6752 let project = Project::test(Arc::new(RealFs), cx);
6753 let (worktree, _) = project
6754 .update(cx, |p, cx| {
6755 p.find_or_create_local_worktree(dir.path(), true, cx)
6756 })
6757 .await
6758 .unwrap();
6759 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6760
6761 worktree.flush_fs_events(&cx).await;
6762 worktree
6763 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6764 .await;
6765
6766 let buffer1 = project
6767 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6768 .await
6769 .unwrap();
6770 let events = Rc::new(RefCell::new(Vec::new()));
6771
6772 // initially, the buffer isn't dirty.
6773 buffer1.update(cx, |buffer, cx| {
6774 cx.subscribe(&buffer1, {
6775 let events = events.clone();
6776 move |_, _, event, _| match event {
6777 BufferEvent::Operation(_) => {}
6778 _ => events.borrow_mut().push(event.clone()),
6779 }
6780 })
6781 .detach();
6782
6783 assert!(!buffer.is_dirty());
6784 assert!(events.borrow().is_empty());
6785
6786 buffer.edit(vec![1..2], "", cx);
6787 });
6788
6789 // after the first edit, the buffer is dirty, and emits a dirtied event.
6790 buffer1.update(cx, |buffer, cx| {
6791 assert!(buffer.text() == "ac");
6792 assert!(buffer.is_dirty());
6793 assert_eq!(
6794 *events.borrow(),
6795 &[language::Event::Edited, language::Event::Dirtied]
6796 );
6797 events.borrow_mut().clear();
6798 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6799 });
6800
6801 // after saving, the buffer is not dirty, and emits a saved event.
6802 buffer1.update(cx, |buffer, cx| {
6803 assert!(!buffer.is_dirty());
6804 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6805 events.borrow_mut().clear();
6806
6807 buffer.edit(vec![1..1], "B", cx);
6808 buffer.edit(vec![2..2], "D", cx);
6809 });
6810
6811 // after editing again, the buffer is dirty, and emits another dirty event.
6812 buffer1.update(cx, |buffer, cx| {
6813 assert!(buffer.text() == "aBDc");
6814 assert!(buffer.is_dirty());
6815 assert_eq!(
6816 *events.borrow(),
6817 &[
6818 language::Event::Edited,
6819 language::Event::Dirtied,
6820 language::Event::Edited,
6821 ],
6822 );
6823 events.borrow_mut().clear();
6824
6825 // TODO - currently, after restoring the buffer to its
6826 // previously-saved state, the buffer is still considered dirty.
6827 buffer.edit([1..3], "", cx);
6828 assert!(buffer.text() == "ac");
6829 assert!(buffer.is_dirty());
6830 });
6831
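// Editing back to the saved text emits only an Edited event: the buffer was
// already dirty, so no new Dirtied event fires.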
6832 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6833
6834 // When a file is deleted, the buffer is considered dirty.
6835 let events = Rc::new(RefCell::new(Vec::new()));
6836 let buffer2 = project
6837 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6838 .await
6839 .unwrap();
6840 buffer2.update(cx, |_, cx| {
6841 cx.subscribe(&buffer2, {
6842 let events = events.clone();
6843 move |_, _, event, _| events.borrow_mut().push(event.clone())
6844 })
6845 .detach();
6846 });
6847
6848 fs::remove_file(dir.path().join("file2")).unwrap();
6849 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6850 assert_eq!(
6851 *events.borrow(),
6852 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6853 );
6854
6855 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6856 let events = Rc::new(RefCell::new(Vec::new()));
6857 let buffer3 = project
6858 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6859 .await
6860 .unwrap();
6861 buffer3.update(cx, |_, cx| {
6862 cx.subscribe(&buffer3, {
6863 let events = events.clone();
6864 move |_, _, event, _| events.borrow_mut().push(event.clone())
6865 })
6866 .detach();
6867 });
6868
6869 worktree.flush_fs_events(&cx).await;
6870 buffer3.update(cx, |buffer, cx| {
6871 buffer.edit(Some(0..0), "x", cx);
6872 });
6873 events.borrow_mut().clear();
6874 fs::remove_file(dir.path().join("file3")).unwrap();
6875 buffer3
6876 .condition(&cx, |_, _| !events.borrow().is_empty())
6877 .await;
6878 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6879 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6880 }
6881
6882 #[gpui::test]
6883 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6884 use std::fs;
6885
6886 let initial_contents = "aaa\nbbbbb\nc\n";
6887 let dir = temp_tree(json!({ "the-file": initial_contents }));
6888
6889 let project = Project::test(Arc::new(RealFs), cx);
6890 let (worktree, _) = project
6891 .update(cx, |p, cx| {
6892 p.find_or_create_local_worktree(dir.path(), true, cx)
6893 })
6894 .await
6895 .unwrap();
6896 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6897
6898 worktree
6899 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6900 .await;
6901
6902 let abs_path = dir.path().join("the-file");
6903 let buffer = project
6904 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6905 .await
6906 .unwrap();
6907
6908 // TODO
6909 // Add a cursor on each row.
6910 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6911 // assert!(!buffer.is_dirty());
6912 // buffer.add_selection_set(
6913 // &(0..3)
6914 // .map(|row| Selection {
6915 // id: row as usize,
6916 // start: Point::new(row, 1),
6917 // end: Point::new(row, 1),
6918 // reversed: false,
6919 // goal: SelectionGoal::None,
6920 // })
6921 // .collect::<Vec<_>>(),
6922 // cx,
6923 // )
6924 // });
6925
6926 // Change the file on disk, adding two new lines of text, and removing
6927 // one line.
6928 buffer.read_with(cx, |buffer, _| {
6929 assert!(!buffer.is_dirty());
6930 assert!(!buffer.has_conflict());
6931 });
6932 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6933 fs::write(&abs_path, new_contents).unwrap();
6934
6935 // Because the buffer was not modified, it is reloaded from disk. Its
6936 // contents are edited according to the diff between the old and new
6937 // file contents.
6938 buffer
6939 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6940 .await;
6941
6942 buffer.update(cx, |buffer, _| {
6943 assert_eq!(buffer.text(), new_contents);
6944 assert!(!buffer.is_dirty());
6945 assert!(!buffer.has_conflict());
6946
6947 // TODO
6948 // let cursor_positions = buffer
6949 // .selection_set(selection_set_id)
6950 // .unwrap()
6951 // .selections::<Point>(&*buffer)
6952 // .map(|selection| {
6953 // assert_eq!(selection.start, selection.end);
6954 // selection.start
6955 // })
6956 // .collect::<Vec<_>>();
6957 // assert_eq!(
6958 // cursor_positions,
6959 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6960 // );
6961 });
6962
6963 // Modify the buffer
6964 buffer.update(cx, |buffer, cx| {
6965 buffer.edit(vec![0..0], " ", cx);
6966 assert!(buffer.is_dirty());
6967 assert!(!buffer.has_conflict());
6968 });
6969
6970 // Change the file on disk again, adding blank lines to the beginning.
6971 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6972
6973 // Because the buffer is modified, it doesn't reload from disk, but is
6974 // marked as having a conflict.
6975 buffer
6976 .condition(&cx, |buffer, _| buffer.has_conflict())
6977 .await;
6978 }
6979
6980 #[gpui::test]
6981 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6982 cx.foreground().forbid_parking();
6983
6984 let fs = FakeFs::new(cx.background());
6985 fs.insert_tree(
6986 "/the-dir",
6987 json!({
6988 "a.rs": "
6989 fn foo(mut v: Vec<usize>) {
6990 for x in &v {
6991 v.push(1);
6992 }
6993 }
6994 "
6995 .unindent(),
6996 }),
6997 )
6998 .await;
6999
7000 let project = Project::test(fs.clone(), cx);
7001 let (worktree, _) = project
7002 .update(cx, |p, cx| {
7003 p.find_or_create_local_worktree("/the-dir", true, cx)
7004 })
7005 .await
7006 .unwrap();
7007 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7008
7009 let buffer = project
7010 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7011 .await
7012 .unwrap();
7013
7014 let buffer_uri = lsp::Url::from_file_path("/the-dir/a.rs").unwrap();
7015 let message = lsp::PublishDiagnosticsParams {
7016 uri: buffer_uri.clone(),
7017 diagnostics: vec![
7018 lsp::Diagnostic {
7019 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7020 severity: Some(DiagnosticSeverity::WARNING),
7021 message: "error 1".to_string(),
7022 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7023 location: lsp::Location {
7024 uri: buffer_uri.clone(),
7025 range: lsp::Range::new(
7026 lsp::Position::new(1, 8),
7027 lsp::Position::new(1, 9),
7028 ),
7029 },
7030 message: "error 1 hint 1".to_string(),
7031 }]),
7032 ..Default::default()
7033 },
7034 lsp::Diagnostic {
7035 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7036 severity: Some(DiagnosticSeverity::HINT),
7037 message: "error 1 hint 1".to_string(),
7038 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7039 location: lsp::Location {
7040 uri: buffer_uri.clone(),
7041 range: lsp::Range::new(
7042 lsp::Position::new(1, 8),
7043 lsp::Position::new(1, 9),
7044 ),
7045 },
7046 message: "original diagnostic".to_string(),
7047 }]),
7048 ..Default::default()
7049 },
7050 lsp::Diagnostic {
7051 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7052 severity: Some(DiagnosticSeverity::ERROR),
7053 message: "error 2".to_string(),
7054 related_information: Some(vec![
7055 lsp::DiagnosticRelatedInformation {
7056 location: lsp::Location {
7057 uri: buffer_uri.clone(),
7058 range: lsp::Range::new(
7059 lsp::Position::new(1, 13),
7060 lsp::Position::new(1, 15),
7061 ),
7062 },
7063 message: "error 2 hint 1".to_string(),
7064 },
7065 lsp::DiagnosticRelatedInformation {
7066 location: lsp::Location {
7067 uri: buffer_uri.clone(),
7068 range: lsp::Range::new(
7069 lsp::Position::new(1, 13),
7070 lsp::Position::new(1, 15),
7071 ),
7072 },
7073 message: "error 2 hint 2".to_string(),
7074 },
7075 ]),
7076 ..Default::default()
7077 },
7078 lsp::Diagnostic {
7079 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7080 severity: Some(DiagnosticSeverity::HINT),
7081 message: "error 2 hint 1".to_string(),
7082 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7083 location: lsp::Location {
7084 uri: buffer_uri.clone(),
7085 range: lsp::Range::new(
7086 lsp::Position::new(2, 8),
7087 lsp::Position::new(2, 17),
7088 ),
7089 },
7090 message: "original diagnostic".to_string(),
7091 }]),
7092 ..Default::default()
7093 },
7094 lsp::Diagnostic {
7095 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7096 severity: Some(DiagnosticSeverity::HINT),
7097 message: "error 2 hint 2".to_string(),
7098 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7099 location: lsp::Location {
7100 uri: buffer_uri.clone(),
7101 range: lsp::Range::new(
7102 lsp::Position::new(2, 8),
7103 lsp::Position::new(2, 17),
7104 ),
7105 },
7106 message: "original diagnostic".to_string(),
7107 }]),
7108 ..Default::default()
7109 },
7110 ],
7111 version: None,
7112 };
7113
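// update_diagnostics groups each primary diagnostic with the hint diagnostics
// derived from its related information: every entry in a group shares a group_id,
// and only the original diagnostic has is_primary set.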
7114 project
7115 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7116 .unwrap();
7117 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7118
7119 assert_eq!(
7120 buffer
7121 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7122 .collect::<Vec<_>>(),
7123 &[
7124 DiagnosticEntry {
7125 range: Point::new(1, 8)..Point::new(1, 9),
7126 diagnostic: Diagnostic {
7127 severity: DiagnosticSeverity::WARNING,
7128 message: "error 1".to_string(),
7129 group_id: 0,
7130 is_primary: true,
7131 ..Default::default()
7132 }
7133 },
7134 DiagnosticEntry {
7135 range: Point::new(1, 8)..Point::new(1, 9),
7136 diagnostic: Diagnostic {
7137 severity: DiagnosticSeverity::HINT,
7138 message: "error 1 hint 1".to_string(),
7139 group_id: 0,
7140 is_primary: false,
7141 ..Default::default()
7142 }
7143 },
7144 DiagnosticEntry {
7145 range: Point::new(1, 13)..Point::new(1, 15),
7146 diagnostic: Diagnostic {
7147 severity: DiagnosticSeverity::HINT,
7148 message: "error 2 hint 1".to_string(),
7149 group_id: 1,
7150 is_primary: false,
7151 ..Default::default()
7152 }
7153 },
7154 DiagnosticEntry {
7155 range: Point::new(1, 13)..Point::new(1, 15),
7156 diagnostic: Diagnostic {
7157 severity: DiagnosticSeverity::HINT,
7158 message: "error 2 hint 2".to_string(),
7159 group_id: 1,
7160 is_primary: false,
7161 ..Default::default()
7162 }
7163 },
7164 DiagnosticEntry {
7165 range: Point::new(2, 8)..Point::new(2, 17),
7166 diagnostic: Diagnostic {
7167 severity: DiagnosticSeverity::ERROR,
7168 message: "error 2".to_string(),
7169 group_id: 1,
7170 is_primary: true,
7171 ..Default::default()
7172 }
7173 }
7174 ]
7175 );
7176
7177 assert_eq!(
7178 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7179 &[
7180 DiagnosticEntry {
7181 range: Point::new(1, 8)..Point::new(1, 9),
7182 diagnostic: Diagnostic {
7183 severity: DiagnosticSeverity::WARNING,
7184 message: "error 1".to_string(),
7185 group_id: 0,
7186 is_primary: true,
7187 ..Default::default()
7188 }
7189 },
7190 DiagnosticEntry {
7191 range: Point::new(1, 8)..Point::new(1, 9),
7192 diagnostic: Diagnostic {
7193 severity: DiagnosticSeverity::HINT,
7194 message: "error 1 hint 1".to_string(),
7195 group_id: 0,
7196 is_primary: false,
7197 ..Default::default()
7198 }
7199 },
7200 ]
7201 );
7202 assert_eq!(
7203 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7204 &[
7205 DiagnosticEntry {
7206 range: Point::new(1, 13)..Point::new(1, 15),
7207 diagnostic: Diagnostic {
7208 severity: DiagnosticSeverity::HINT,
7209 message: "error 2 hint 1".to_string(),
7210 group_id: 1,
7211 is_primary: false,
7212 ..Default::default()
7213 }
7214 },
7215 DiagnosticEntry {
7216 range: Point::new(1, 13)..Point::new(1, 15),
7217 diagnostic: Diagnostic {
7218 severity: DiagnosticSeverity::HINT,
7219 message: "error 2 hint 2".to_string(),
7220 group_id: 1,
7221 is_primary: false,
7222 ..Default::default()
7223 }
7224 },
7225 DiagnosticEntry {
7226 range: Point::new(2, 8)..Point::new(2, 17),
7227 diagnostic: Diagnostic {
7228 severity: DiagnosticSeverity::ERROR,
7229 message: "error 2".to_string(),
7230 group_id: 1,
7231 is_primary: true,
7232 ..Default::default()
7233 }
7234 }
7235 ]
7236 );
7237 }
7238
7239 #[gpui::test]
7240 async fn test_rename(cx: &mut gpui::TestAppContext) {
7241 cx.foreground().forbid_parking();
7242
7243 let mut language = Language::new(
7244 LanguageConfig {
7245 name: "Rust".into(),
7246 path_suffixes: vec!["rs".to_string()],
7247 ..Default::default()
7248 },
7249 Some(tree_sitter_rust::language()),
7250 );
7251 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7252
7253 let fs = FakeFs::new(cx.background());
7254 fs.insert_tree(
7255 "/dir",
7256 json!({
7257 "one.rs": "const ONE: usize = 1;",
7258 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7259 }),
7260 )
7261 .await;
7262
7263 let project = Project::test(fs.clone(), cx);
7264 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7265
7266 let (tree, _) = project
7267 .update(cx, |project, cx| {
7268 project.find_or_create_local_worktree("/dir", true, cx)
7269 })
7270 .await
7271 .unwrap();
7272 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7273 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7274 .await;
7275
7276 let buffer = project
7277 .update(cx, |project, cx| {
7278 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7279 })
7280 .await
7281 .unwrap();
7282
7283 let fake_server = fake_servers.next().await.unwrap();
7284
7285 let response = project.update(cx, |project, cx| {
7286 project.prepare_rename(buffer.clone(), 7, cx)
7287 });
7288 fake_server
7289 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7290 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7291 assert_eq!(params.position, lsp::Position::new(0, 7));
7292 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7293 lsp::Position::new(0, 6),
7294 lsp::Position::new(0, 9),
7295 ))))
7296 })
7297 .next()
7298 .await
7299 .unwrap();
7300 let range = response.await.unwrap().unwrap();
7301 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7302 assert_eq!(range, 6..9);
7303
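// perform_rename applies the server's WorkspaceEdit across all affected buffers,
// opening two.rs on demand, and returns the per-buffer edits as a
// ProjectTransaction (asserted below).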
7304 let response = project.update(cx, |project, cx| {
7305 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7306 });
7307 fake_server
7308 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7309 assert_eq!(
7310 params.text_document_position.text_document.uri.as_str(),
7311 "file:///dir/one.rs"
7312 );
7313 assert_eq!(
7314 params.text_document_position.position,
7315 lsp::Position::new(0, 7)
7316 );
7317 assert_eq!(params.new_name, "THREE");
7318 Ok(Some(lsp::WorkspaceEdit {
7319 changes: Some(
7320 [
7321 (
7322 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7323 vec![lsp::TextEdit::new(
7324 lsp::Range::new(
7325 lsp::Position::new(0, 6),
7326 lsp::Position::new(0, 9),
7327 ),
7328 "THREE".to_string(),
7329 )],
7330 ),
7331 (
7332 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7333 vec![
7334 lsp::TextEdit::new(
7335 lsp::Range::new(
7336 lsp::Position::new(0, 24),
7337 lsp::Position::new(0, 27),
7338 ),
7339 "THREE".to_string(),
7340 ),
7341 lsp::TextEdit::new(
7342 lsp::Range::new(
7343 lsp::Position::new(0, 35),
7344 lsp::Position::new(0, 38),
7345 ),
7346 "THREE".to_string(),
7347 ),
7348 ],
7349 ),
7350 ]
7351 .into_iter()
7352 .collect(),
7353 ),
7354 ..Default::default()
7355 }))
7356 })
7357 .next()
7358 .await
7359 .unwrap();
7360 let mut transaction = response.await.unwrap().0;
7361 assert_eq!(transaction.len(), 2);
7362 assert_eq!(
7363 transaction
7364 .remove_entry(&buffer)
7365 .unwrap()
7366 .0
7367 .read_with(cx, |buffer, _| buffer.text()),
7368 "const THREE: usize = 1;"
7369 );
7370 assert_eq!(
7371 transaction
7372 .into_keys()
7373 .next()
7374 .unwrap()
7375 .read_with(cx, |buffer, _| buffer.text()),
7376 "const TWO: usize = one::THREE + one::THREE;"
7377 );
7378 }
7379
7380 #[gpui::test]
7381 async fn test_search(cx: &mut gpui::TestAppContext) {
7382 let fs = FakeFs::new(cx.background());
7383 fs.insert_tree(
7384 "/dir",
7385 json!({
7386 "one.rs": "const ONE: usize = 1;",
7387 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7388 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7389 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7390 }),
7391 )
7392 .await;
7393 let project = Project::test(fs.clone(), cx);
7394 let (tree, _) = project
7395 .update(cx, |project, cx| {
7396 project.find_or_create_local_worktree("/dir", true, cx)
7397 })
7398 .await
7399 .unwrap();
7400 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7401 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7402 .await;
7403
7404 assert_eq!(
7405 search(&project, SearchQuery::text("TWO", false, true), cx)
7406 .await
7407 .unwrap(),
7408 HashMap::from_iter([
7409 ("two.rs".to_string(), vec![6..9]),
7410 ("three.rs".to_string(), vec![37..40])
7411 ])
7412 );
7413
7414 let buffer_4 = project
7415 .update(cx, |project, cx| {
7416 project.open_buffer((worktree_id, "four.rs"), cx)
7417 })
7418 .await
7419 .unwrap();
7420 buffer_4.update(cx, |buffer, cx| {
7421 buffer.edit([20..28, 31..43], "two::TWO", cx);
7422 });
7423
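// The search now reflects the unsaved, in-memory edits to four.rs: open buffers
// are searched by their current contents rather than by what is on disk.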
7424 assert_eq!(
7425 search(&project, SearchQuery::text("TWO", false, true), cx)
7426 .await
7427 .unwrap(),
7428 HashMap::from_iter([
7429 ("two.rs".to_string(), vec![6..9]),
7430 ("three.rs".to_string(), vec![37..40]),
7431 ("four.rs".to_string(), vec![25..28, 36..39])
7432 ])
7433 );
7434
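// Helper: runs a project-wide search and converts the resulting anchor ranges
// into per-file offset ranges for easier assertions.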
7435 async fn search(
7436 project: &ModelHandle<Project>,
7437 query: SearchQuery,
7438 cx: &mut gpui::TestAppContext,
7439 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7440 let results = project
7441 .update(cx, |project, cx| project.search(query, cx))
7442 .await?;
7443
7444 Ok(results
7445 .into_iter()
7446 .map(|(buffer, ranges)| {
7447 buffer.read_with(cx, |buffer, _| {
7448 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7449 let ranges = ranges
7450 .into_iter()
7451 .map(|range| range.to_offset(buffer))
7452 .collect::<Vec<_>>();
7453 (path, ranges)
7454 })
7455 })
7456 .collect())
7457 }
7458 }
7459}