1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use settings::Settings;
32use sha2::{Digest, Sha256};
33use similar::{ChangeTag, TextDiff};
34use std::{
35 cell::RefCell,
36 cmp::{self, Ordering},
37 convert::TryInto,
38 hash::Hash,
39 mem,
40 ops::Range,
41 path::{Component, Path, PathBuf},
42 rc::Rc,
43 sync::{
44 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
45 Arc,
46 },
47 time::Instant,
48};
49use util::{post_inc, ResultExt, TryFutureExt as _};
50
51pub use fs::*;
52pub use worktree::*;
53
54pub trait Item: Entity {
55 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
56}
57
58pub struct Project {
59 worktrees: Vec<WorktreeHandle>,
60 active_entry: Option<ProjectEntryId>,
61 languages: Arc<LanguageRegistry>,
62 language_servers:
63 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
64 started_language_servers:
65 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
66 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
67 language_server_settings: Arc<Mutex<serde_json::Value>>,
68 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
69 next_language_server_id: usize,
70 client: Arc<client::Client>,
71 next_entry_id: Arc<AtomicUsize>,
72 user_store: ModelHandle<UserStore>,
73 fs: Arc<dyn Fs>,
74 client_state: ProjectClientState,
75 collaborators: HashMap<PeerId, Collaborator>,
76 subscriptions: Vec<client::Subscription>,
77 language_servers_with_diagnostics_running: isize,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136pub struct LanguageServerStatus {
137 pub name: String,
138 pub pending_work: BTreeMap<String, LanguageServerProgress>,
139 pending_diagnostic_updates: isize,
140}
141
142#[derive(Clone, Debug)]
143pub struct LanguageServerProgress {
144 pub message: Option<String>,
145 pub percentage: Option<usize>,
146 pub last_update_at: Instant,
147}
148
149#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
150pub struct ProjectPath {
151 pub worktree_id: WorktreeId,
152 pub path: Arc<Path>,
153}
154
155#[derive(Clone, Debug, Default, PartialEq)]
156pub struct DiagnosticSummary {
157 pub error_count: usize,
158 pub warning_count: usize,
159 pub info_count: usize,
160 pub hint_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 info_count: 0,
197 hint_count: 0,
198 };
199
200 for entry in diagnostics {
201 if entry.diagnostic.is_primary {
202 match entry.diagnostic.severity {
203 DiagnosticSeverity::ERROR => this.error_count += 1,
204 DiagnosticSeverity::WARNING => this.warning_count += 1,
205 DiagnosticSeverity::INFORMATION => this.info_count += 1,
206 DiagnosticSeverity::HINT => this.hint_count += 1,
207 _ => {}
208 }
209 }
210 }
211
212 this
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 info_count: self.info_count as u32,
221 hint_count: self.hint_count as u32,
222 }
223 }
224}
225
226#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
227pub struct ProjectEntryId(usize);
228
229impl ProjectEntryId {
230 pub fn new(counter: &AtomicUsize) -> Self {
231 Self(counter.fetch_add(1, SeqCst))
232 }
233
234 pub fn from_proto(id: u64) -> Self {
235 Self(id as usize)
236 }
237
238 pub fn to_proto(&self) -> u64 {
239 self.0 as u64
240 }
241
242 pub fn to_usize(&self) -> usize {
243 self.0
244 }
245}
246
247impl Project {
248 pub fn init(client: &Arc<Client>) {
249 client.add_model_message_handler(Self::handle_add_collaborator);
250 client.add_model_message_handler(Self::handle_buffer_reloaded);
251 client.add_model_message_handler(Self::handle_buffer_saved);
252 client.add_model_message_handler(Self::handle_start_language_server);
253 client.add_model_message_handler(Self::handle_update_language_server);
254 client.add_model_message_handler(Self::handle_remove_collaborator);
255 client.add_model_message_handler(Self::handle_register_worktree);
256 client.add_model_message_handler(Self::handle_unregister_worktree);
257 client.add_model_message_handler(Self::handle_unshare_project);
258 client.add_model_message_handler(Self::handle_update_buffer_file);
259 client.add_model_message_handler(Self::handle_update_buffer);
260 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
261 client.add_model_message_handler(Self::handle_update_worktree);
262 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
263 client.add_model_request_handler(Self::handle_apply_code_action);
264 client.add_model_request_handler(Self::handle_reload_buffers);
265 client.add_model_request_handler(Self::handle_format_buffers);
266 client.add_model_request_handler(Self::handle_get_code_actions);
267 client.add_model_request_handler(Self::handle_get_completions);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
272 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
273 client.add_model_request_handler(Self::handle_search_project);
274 client.add_model_request_handler(Self::handle_get_project_symbols);
275 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
276 client.add_model_request_handler(Self::handle_open_buffer_by_id);
277 client.add_model_request_handler(Self::handle_open_buffer_by_path);
278 client.add_model_request_handler(Self::handle_save_buffer);
279 }
280
281 pub fn local(
282 client: Arc<Client>,
283 user_store: ModelHandle<UserStore>,
284 languages: Arc<LanguageRegistry>,
285 fs: Arc<dyn Fs>,
286 cx: &mut MutableAppContext,
287 ) -> ModelHandle<Self> {
288 cx.add_model(|cx: &mut ModelContext<Self>| {
289 let (remote_id_tx, remote_id_rx) = watch::channel();
290 let _maintain_remote_id_task = cx.spawn_weak({
291 let rpc = client.clone();
292 move |this, mut cx| {
293 async move {
294 let mut status = rpc.status();
295 while let Some(status) = status.next().await {
296 if let Some(this) = this.upgrade(&cx) {
297 if status.is_connected() {
298 this.update(&mut cx, |this, cx| this.register(cx)).await?;
299 } else {
300 this.update(&mut cx, |this, cx| this.unregister(cx));
301 }
302 }
303 }
304 Ok(())
305 }
306 .log_err()
307 }
308 });
309
310 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
311 Self {
312 worktrees: Default::default(),
313 collaborators: Default::default(),
314 opened_buffers: Default::default(),
315 shared_buffers: Default::default(),
316 loading_buffers: Default::default(),
317 loading_local_worktrees: Default::default(),
318 buffer_snapshots: Default::default(),
319 client_state: ProjectClientState::Local {
320 is_shared: false,
321 remote_id_tx,
322 remote_id_rx,
323 _maintain_remote_id_task,
324 },
325 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
326 subscriptions: Vec::new(),
327 active_entry: None,
328 languages,
329 client,
330 user_store,
331 fs,
332 next_entry_id: Default::default(),
333 language_servers_with_diagnostics_running: 0,
334 language_servers: Default::default(),
335 started_language_servers: Default::default(),
336 language_server_statuses: Default::default(),
337 last_workspace_edits_by_language_server: Default::default(),
338 language_server_settings: Default::default(),
339 next_language_server_id: 0,
340 nonce: StdRng::from_entropy().gen(),
341 }
342 })
343 }
344
345 pub async fn remote(
346 remote_id: u64,
347 client: Arc<Client>,
348 user_store: ModelHandle<UserStore>,
349 languages: Arc<LanguageRegistry>,
350 fs: Arc<dyn Fs>,
351 cx: &mut AsyncAppContext,
352 ) -> Result<ModelHandle<Self>> {
353 client.authenticate_and_connect(true, &cx).await?;
354
355 let response = client
356 .request(proto::JoinProject {
357 project_id: remote_id,
358 })
359 .await?;
360
361 let replica_id = response.replica_id as ReplicaId;
362
363 let mut worktrees = Vec::new();
364 for worktree in response.worktrees {
365 let (worktree, load_task) = cx
366 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
367 worktrees.push(worktree);
368 load_task.detach();
369 }
370
371 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
372 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
373 let mut this = Self {
374 worktrees: Vec::new(),
375 loading_buffers: Default::default(),
376 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
377 shared_buffers: Default::default(),
378 loading_local_worktrees: Default::default(),
379 active_entry: None,
380 collaborators: Default::default(),
381 languages,
382 user_store: user_store.clone(),
383 fs,
384 next_entry_id: Default::default(),
385 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
386 client: client.clone(),
387 client_state: ProjectClientState::Remote {
388 sharing_has_stopped: false,
389 remote_id,
390 replica_id,
391 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
392 async move {
393 let mut status = client.status();
394 let is_connected =
395 status.next().await.map_or(false, |s| s.is_connected());
396 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
397 if !is_connected || status.next().await.is_some() {
398 if let Some(this) = this.upgrade(&cx) {
399 this.update(&mut cx, |this, cx| this.project_unshared(cx))
400 }
401 }
402 Ok(())
403 }
404 .log_err()
405 }),
406 },
407 language_servers_with_diagnostics_running: 0,
408 language_servers: Default::default(),
409 started_language_servers: Default::default(),
410 language_server_settings: Default::default(),
411 language_server_statuses: response
412 .language_servers
413 .into_iter()
414 .map(|server| {
415 (
416 server.id as usize,
417 LanguageServerStatus {
418 name: server.name,
419 pending_work: Default::default(),
420 pending_diagnostic_updates: 0,
421 },
422 )
423 })
424 .collect(),
425 last_workspace_edits_by_language_server: Default::default(),
426 next_language_server_id: 0,
427 opened_buffers: Default::default(),
428 buffer_snapshots: Default::default(),
429 nonce: StdRng::from_entropy().gen(),
430 };
431 for worktree in worktrees {
432 this.add_worktree(&worktree, cx);
433 }
434 this
435 });
436
437 let user_ids = response
438 .collaborators
439 .iter()
440 .map(|peer| peer.user_id)
441 .collect();
442 user_store
443 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
444 .await?;
445 let mut collaborators = HashMap::default();
446 for message in response.collaborators {
447 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
448 collaborators.insert(collaborator.peer_id, collaborator);
449 }
450
451 this.update(cx, |this, _| {
452 this.collaborators = collaborators;
453 });
454
455 Ok(this)
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
460 let languages = Arc::new(LanguageRegistry::test());
461 let http_client = client::test::FakeHttpClient::with_404_response();
462 let client = client::Client::new(http_client.clone());
463 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
464 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
465 }
466
467 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
468 self.opened_buffers
469 .get(&remote_id)
470 .and_then(|buffer| buffer.upgrade(cx))
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn languages(&self) -> &Arc<LanguageRegistry> {
475 &self.languages
476 }
477
478 #[cfg(any(test, feature = "test-support"))]
479 pub fn check_invariants(&self, cx: &AppContext) {
480 if self.is_local() {
481 let mut worktree_root_paths = HashMap::default();
482 for worktree in self.worktrees(cx) {
483 let worktree = worktree.read(cx);
484 let abs_path = worktree.as_local().unwrap().abs_path().clone();
485 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
486 assert_eq!(
487 prev_worktree_id,
488 None,
489 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
490 abs_path,
491 worktree.id(),
492 prev_worktree_id
493 )
494 }
495 } else {
496 let replica_id = self.replica_id();
497 for buffer in self.opened_buffers.values() {
498 if let Some(buffer) = buffer.upgrade(cx) {
499 let buffer = buffer.read(cx);
500 assert_eq!(
501 buffer.deferred_ops_len(),
502 0,
503 "replica {}, buffer {} has deferred operations",
504 replica_id,
505 buffer.remote_id()
506 );
507 }
508 }
509 }
510 }
511
512 #[cfg(any(test, feature = "test-support"))]
513 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
514 let path = path.into();
515 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
516 self.opened_buffers.iter().any(|(_, buffer)| {
517 if let Some(buffer) = buffer.upgrade(cx) {
518 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
519 if file.worktree == worktree && file.path() == &path.path {
520 return true;
521 }
522 }
523 }
524 false
525 })
526 } else {
527 false
528 }
529 }
530
531 pub fn fs(&self) -> &Arc<dyn Fs> {
532 &self.fs
533 }
534
535 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
536 self.unshare(cx);
537 for worktree in &self.worktrees {
538 if let Some(worktree) = worktree.upgrade(cx) {
539 worktree.update(cx, |worktree, _| {
540 worktree.as_local_mut().unwrap().unregister();
541 });
542 }
543 }
544
545 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
546 *remote_id_tx.borrow_mut() = None;
547 }
548
549 self.subscriptions.clear();
550 }
551
552 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
553 self.unregister(cx);
554
555 let response = self.client.request(proto::RegisterProject {});
556 cx.spawn(|this, mut cx| async move {
557 let remote_id = response.await?.project_id;
558
559 let mut registrations = Vec::new();
560 this.update(&mut cx, |this, cx| {
561 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
562 *remote_id_tx.borrow_mut() = Some(remote_id);
563 }
564
565 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
566
567 this.subscriptions
568 .push(this.client.add_model_for_remote_entity(remote_id, cx));
569
570 for worktree in &this.worktrees {
571 if let Some(worktree) = worktree.upgrade(cx) {
572 registrations.push(worktree.update(cx, |worktree, cx| {
573 let worktree = worktree.as_local_mut().unwrap();
574 worktree.register(remote_id, cx)
575 }));
576 }
577 }
578 });
579
580 futures::future::try_join_all(registrations).await?;
581 Ok(())
582 })
583 }
584
585 pub fn remote_id(&self) -> Option<u64> {
586 match &self.client_state {
587 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
588 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
589 }
590 }
591
592 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
593 let mut id = None;
594 let mut watch = None;
595 match &self.client_state {
596 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
597 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
598 }
599
600 async move {
601 if let Some(id) = id {
602 return id;
603 }
604 let mut watch = watch.unwrap();
605 loop {
606 let id = *watch.borrow();
607 if let Some(id) = id {
608 return id;
609 }
610 watch.next().await;
611 }
612 }
613 }
614
615 pub fn replica_id(&self) -> ReplicaId {
616 match &self.client_state {
617 ProjectClientState::Local { .. } => 0,
618 ProjectClientState::Remote { replica_id, .. } => *replica_id,
619 }
620 }
621
622 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
623 &self.collaborators
624 }
625
626 pub fn worktrees<'a>(
627 &'a self,
628 cx: &'a AppContext,
629 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
630 self.worktrees
631 .iter()
632 .filter_map(move |worktree| worktree.upgrade(cx))
633 }
634
635 pub fn visible_worktrees<'a>(
636 &'a self,
637 cx: &'a AppContext,
638 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
639 self.worktrees.iter().filter_map(|worktree| {
640 worktree.upgrade(cx).and_then(|worktree| {
641 if worktree.read(cx).is_visible() {
642 Some(worktree)
643 } else {
644 None
645 }
646 })
647 })
648 }
649
650 pub fn worktree_for_id(
651 &self,
652 id: WorktreeId,
653 cx: &AppContext,
654 ) -> Option<ModelHandle<Worktree>> {
655 self.worktrees(cx)
656 .find(|worktree| worktree.read(cx).id() == id)
657 }
658
659 pub fn worktree_for_entry(
660 &self,
661 entry_id: ProjectEntryId,
662 cx: &AppContext,
663 ) -> Option<ModelHandle<Worktree>> {
664 self.worktrees(cx)
665 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
666 }
667
668 pub fn worktree_id_for_entry(
669 &self,
670 entry_id: ProjectEntryId,
671 cx: &AppContext,
672 ) -> Option<WorktreeId> {
673 self.worktree_for_entry(entry_id, cx)
674 .map(|worktree| worktree.read(cx).id())
675 }
676
677 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
678 let rpc = self.client.clone();
679 cx.spawn(|this, mut cx| async move {
680 let project_id = this.update(&mut cx, |this, cx| {
681 if let ProjectClientState::Local {
682 is_shared,
683 remote_id_rx,
684 ..
685 } = &mut this.client_state
686 {
687 *is_shared = true;
688
689 for open_buffer in this.opened_buffers.values_mut() {
690 match open_buffer {
691 OpenBuffer::Strong(_) => {}
692 OpenBuffer::Weak(buffer) => {
693 if let Some(buffer) = buffer.upgrade(cx) {
694 *open_buffer = OpenBuffer::Strong(buffer);
695 }
696 }
697 OpenBuffer::Loading(_) => unreachable!(),
698 }
699 }
700
701 for worktree_handle in this.worktrees.iter_mut() {
702 match worktree_handle {
703 WorktreeHandle::Strong(_) => {}
704 WorktreeHandle::Weak(worktree) => {
705 if let Some(worktree) = worktree.upgrade(cx) {
706 *worktree_handle = WorktreeHandle::Strong(worktree);
707 }
708 }
709 }
710 }
711
712 remote_id_rx
713 .borrow()
714 .ok_or_else(|| anyhow!("no project id"))
715 } else {
716 Err(anyhow!("can't share a remote project"))
717 }
718 })?;
719
720 rpc.request(proto::ShareProject { project_id }).await?;
721
722 let mut tasks = Vec::new();
723 this.update(&mut cx, |this, cx| {
724 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
725 worktree.update(cx, |worktree, cx| {
726 let worktree = worktree.as_local_mut().unwrap();
727 tasks.push(worktree.share(project_id, cx));
728 });
729 }
730 });
731 for task in tasks {
732 task.await?;
733 }
734 this.update(&mut cx, |_, cx| cx.notify());
735 Ok(())
736 })
737 }
738
739 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
740 let rpc = self.client.clone();
741
742 if let ProjectClientState::Local {
743 is_shared,
744 remote_id_rx,
745 ..
746 } = &mut self.client_state
747 {
748 if !*is_shared {
749 return;
750 }
751
752 *is_shared = false;
753 self.collaborators.clear();
754 self.shared_buffers.clear();
755 for worktree_handle in self.worktrees.iter_mut() {
756 if let WorktreeHandle::Strong(worktree) = worktree_handle {
757 let is_visible = worktree.update(cx, |worktree, _| {
758 worktree.as_local_mut().unwrap().unshare();
759 worktree.is_visible()
760 });
761 if !is_visible {
762 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
763 }
764 }
765 }
766
767 for open_buffer in self.opened_buffers.values_mut() {
768 match open_buffer {
769 OpenBuffer::Strong(buffer) => {
770 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
771 }
772 _ => {}
773 }
774 }
775
776 if let Some(project_id) = *remote_id_rx.borrow() {
777 rpc.send(proto::UnshareProject { project_id }).log_err();
778 }
779
780 cx.notify();
781 } else {
782 log::error!("attempted to unshare a remote project");
783 }
784 }
785
786 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
787 if let ProjectClientState::Remote {
788 sharing_has_stopped,
789 ..
790 } = &mut self.client_state
791 {
792 *sharing_has_stopped = true;
793 self.collaborators.clear();
794 cx.notify();
795 }
796 }
797
798 pub fn is_read_only(&self) -> bool {
799 match &self.client_state {
800 ProjectClientState::Local { .. } => false,
801 ProjectClientState::Remote {
802 sharing_has_stopped,
803 ..
804 } => *sharing_has_stopped,
805 }
806 }
807
808 pub fn is_local(&self) -> bool {
809 match &self.client_state {
810 ProjectClientState::Local { .. } => true,
811 ProjectClientState::Remote { .. } => false,
812 }
813 }
814
815 pub fn is_remote(&self) -> bool {
816 !self.is_local()
817 }
818
819 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
820 if self.is_remote() {
821 return Err(anyhow!("creating buffers as a guest is not supported yet"));
822 }
823
824 let buffer = cx.add_model(|cx| {
825 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
826 });
827 self.register_buffer(&buffer, cx)?;
828 Ok(buffer)
829 }
830
831 pub fn open_path(
832 &mut self,
833 path: impl Into<ProjectPath>,
834 cx: &mut ModelContext<Self>,
835 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
836 let task = self.open_buffer(path, cx);
837 cx.spawn_weak(|_, cx| async move {
838 let buffer = task.await?;
839 let project_entry_id = buffer
840 .read_with(&cx, |buffer, cx| {
841 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
842 })
843 .ok_or_else(|| anyhow!("no project entry"))?;
844 Ok((project_entry_id, buffer.into()))
845 })
846 }
847
848 pub fn open_buffer(
849 &mut self,
850 path: impl Into<ProjectPath>,
851 cx: &mut ModelContext<Self>,
852 ) -> Task<Result<ModelHandle<Buffer>>> {
853 let project_path = path.into();
854 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
855 worktree
856 } else {
857 return Task::ready(Err(anyhow!("no such worktree")));
858 };
859
860 // If there is already a buffer for the given path, then return it.
861 let existing_buffer = self.get_open_buffer(&project_path, cx);
862 if let Some(existing_buffer) = existing_buffer {
863 return Task::ready(Ok(existing_buffer));
864 }
865
866 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
867 // If the given path is already being loaded, then wait for that existing
868 // task to complete and return the same buffer.
869 hash_map::Entry::Occupied(e) => e.get().clone(),
870
871 // Otherwise, record the fact that this path is now being loaded.
872 hash_map::Entry::Vacant(entry) => {
873 let (mut tx, rx) = postage::watch::channel();
874 entry.insert(rx.clone());
875
876 let load_buffer = if worktree.read(cx).is_local() {
877 self.open_local_buffer(&project_path.path, &worktree, cx)
878 } else {
879 self.open_remote_buffer(&project_path.path, &worktree, cx)
880 };
881
882 cx.spawn(move |this, mut cx| async move {
883 let load_result = load_buffer.await;
884 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
885 // Record the fact that the buffer is no longer loading.
886 this.loading_buffers.remove(&project_path);
887 let buffer = load_result.map_err(Arc::new)?;
888 Ok(buffer)
889 }));
890 })
891 .detach();
892 rx
893 }
894 };
895
896 cx.foreground().spawn(async move {
897 loop {
898 if let Some(result) = loading_watch.borrow().as_ref() {
899 match result {
900 Ok(buffer) => return Ok(buffer.clone()),
901 Err(error) => return Err(anyhow!("{}", error)),
902 }
903 }
904 loading_watch.next().await;
905 }
906 })
907 }
908
909 fn open_local_buffer(
910 &mut self,
911 path: &Arc<Path>,
912 worktree: &ModelHandle<Worktree>,
913 cx: &mut ModelContext<Self>,
914 ) -> Task<Result<ModelHandle<Buffer>>> {
915 let load_buffer = worktree.update(cx, |worktree, cx| {
916 let worktree = worktree.as_local_mut().unwrap();
917 worktree.load_buffer(path, cx)
918 });
919 cx.spawn(|this, mut cx| async move {
920 let buffer = load_buffer.await?;
921 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
922 Ok(buffer)
923 })
924 }
925
926 fn open_remote_buffer(
927 &mut self,
928 path: &Arc<Path>,
929 worktree: &ModelHandle<Worktree>,
930 cx: &mut ModelContext<Self>,
931 ) -> Task<Result<ModelHandle<Buffer>>> {
932 let rpc = self.client.clone();
933 let project_id = self.remote_id().unwrap();
934 let remote_worktree_id = worktree.read(cx).id();
935 let path = path.clone();
936 let path_string = path.to_string_lossy().to_string();
937 cx.spawn(|this, mut cx| async move {
938 let response = rpc
939 .request(proto::OpenBufferByPath {
940 project_id,
941 worktree_id: remote_worktree_id.to_proto(),
942 path: path_string,
943 })
944 .await?;
945 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
946 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
947 .await
948 })
949 }
950
951 fn open_local_buffer_via_lsp(
952 &mut self,
953 abs_path: lsp::Url,
954 lsp_adapter: Arc<dyn LspAdapter>,
955 lsp_server: Arc<LanguageServer>,
956 cx: &mut ModelContext<Self>,
957 ) -> Task<Result<ModelHandle<Buffer>>> {
958 cx.spawn(|this, mut cx| async move {
959 let abs_path = abs_path
960 .to_file_path()
961 .map_err(|_| anyhow!("can't convert URI to path"))?;
962 let (worktree, relative_path) = if let Some(result) =
963 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
964 {
965 result
966 } else {
967 let worktree = this
968 .update(&mut cx, |this, cx| {
969 this.create_local_worktree(&abs_path, false, cx)
970 })
971 .await?;
972 this.update(&mut cx, |this, cx| {
973 this.language_servers.insert(
974 (worktree.read(cx).id(), lsp_adapter.name()),
975 (lsp_adapter, lsp_server),
976 );
977 });
978 (worktree, PathBuf::new())
979 };
980
981 let project_path = ProjectPath {
982 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
983 path: relative_path.into(),
984 };
985 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
986 .await
987 })
988 }
989
990 pub fn open_buffer_by_id(
991 &mut self,
992 id: u64,
993 cx: &mut ModelContext<Self>,
994 ) -> Task<Result<ModelHandle<Buffer>>> {
995 if let Some(buffer) = self.buffer_for_id(id, cx) {
996 Task::ready(Ok(buffer))
997 } else if self.is_local() {
998 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
999 } else if let Some(project_id) = self.remote_id() {
1000 let request = self
1001 .client
1002 .request(proto::OpenBufferById { project_id, id });
1003 cx.spawn(|this, mut cx| async move {
1004 let buffer = request
1005 .await?
1006 .buffer
1007 .ok_or_else(|| anyhow!("invalid buffer"))?;
1008 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1009 .await
1010 })
1011 } else {
1012 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1013 }
1014 }
1015
1016 pub fn save_buffer_as(
1017 &mut self,
1018 buffer: ModelHandle<Buffer>,
1019 abs_path: PathBuf,
1020 cx: &mut ModelContext<Project>,
1021 ) -> Task<Result<()>> {
1022 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1023 let old_path =
1024 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1025 cx.spawn(|this, mut cx| async move {
1026 if let Some(old_path) = old_path {
1027 this.update(&mut cx, |this, cx| {
1028 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1029 });
1030 }
1031 let (worktree, path) = worktree_task.await?;
1032 worktree
1033 .update(&mut cx, |worktree, cx| {
1034 worktree
1035 .as_local_mut()
1036 .unwrap()
1037 .save_buffer_as(buffer.clone(), path, cx)
1038 })
1039 .await?;
1040 this.update(&mut cx, |this, cx| {
1041 this.assign_language_to_buffer(&buffer, cx);
1042 this.register_buffer_with_language_server(&buffer, cx);
1043 });
1044 Ok(())
1045 })
1046 }
1047
1048 pub fn get_open_buffer(
1049 &mut self,
1050 path: &ProjectPath,
1051 cx: &mut ModelContext<Self>,
1052 ) -> Option<ModelHandle<Buffer>> {
1053 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1054 self.opened_buffers.values().find_map(|buffer| {
1055 let buffer = buffer.upgrade(cx)?;
1056 let file = File::from_dyn(buffer.read(cx).file())?;
1057 if file.worktree == worktree && file.path() == &path.path {
1058 Some(buffer)
1059 } else {
1060 None
1061 }
1062 })
1063 }
1064
1065 fn register_buffer(
1066 &mut self,
1067 buffer: &ModelHandle<Buffer>,
1068 cx: &mut ModelContext<Self>,
1069 ) -> Result<()> {
1070 let remote_id = buffer.read(cx).remote_id();
1071 let open_buffer = if self.is_remote() || self.is_shared() {
1072 OpenBuffer::Strong(buffer.clone())
1073 } else {
1074 OpenBuffer::Weak(buffer.downgrade())
1075 };
1076
1077 match self.opened_buffers.insert(remote_id, open_buffer) {
1078 None => {}
1079 Some(OpenBuffer::Loading(operations)) => {
1080 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1081 }
1082 Some(OpenBuffer::Weak(existing_handle)) => {
1083 if existing_handle.upgrade(cx).is_some() {
1084 Err(anyhow!(
1085 "already registered buffer with remote id {}",
1086 remote_id
1087 ))?
1088 }
1089 }
1090 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1091 "already registered buffer with remote id {}",
1092 remote_id
1093 ))?,
1094 }
1095 cx.subscribe(buffer, |this, buffer, event, cx| {
1096 this.on_buffer_event(buffer, event, cx);
1097 })
1098 .detach();
1099
1100 self.assign_language_to_buffer(buffer, cx);
1101 self.register_buffer_with_language_server(buffer, cx);
1102 cx.observe_release(buffer, |this, buffer, cx| {
1103 if let Some(file) = File::from_dyn(buffer.file()) {
1104 if file.is_local() {
1105 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1106 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1107 server
1108 .notify::<lsp::notification::DidCloseTextDocument>(
1109 lsp::DidCloseTextDocumentParams {
1110 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1111 },
1112 )
1113 .log_err();
1114 }
1115 }
1116 }
1117 })
1118 .detach();
1119
1120 Ok(())
1121 }
1122
1123 fn register_buffer_with_language_server(
1124 &mut self,
1125 buffer_handle: &ModelHandle<Buffer>,
1126 cx: &mut ModelContext<Self>,
1127 ) {
1128 let buffer = buffer_handle.read(cx);
1129 let buffer_id = buffer.remote_id();
1130 if let Some(file) = File::from_dyn(buffer.file()) {
1131 if file.is_local() {
1132 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1133 let initial_snapshot = buffer.text_snapshot();
1134 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1135
1136 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1137 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1138 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1139 .log_err();
1140 }
1141 }
1142
1143 if let Some((_, server)) = language_server {
1144 server
1145 .notify::<lsp::notification::DidOpenTextDocument>(
1146 lsp::DidOpenTextDocumentParams {
1147 text_document: lsp::TextDocumentItem::new(
1148 uri,
1149 Default::default(),
1150 0,
1151 initial_snapshot.text(),
1152 ),
1153 }
1154 .clone(),
1155 )
1156 .log_err();
1157 buffer_handle.update(cx, |buffer, cx| {
1158 buffer.set_completion_triggers(
1159 server
1160 .capabilities()
1161 .completion_provider
1162 .as_ref()
1163 .and_then(|provider| provider.trigger_characters.clone())
1164 .unwrap_or(Vec::new()),
1165 cx,
1166 )
1167 });
1168 self.buffer_snapshots
1169 .insert(buffer_id, vec![(0, initial_snapshot)]);
1170 }
1171 }
1172 }
1173 }
1174
1175 fn unregister_buffer_from_language_server(
1176 &mut self,
1177 buffer: &ModelHandle<Buffer>,
1178 old_path: PathBuf,
1179 cx: &mut ModelContext<Self>,
1180 ) {
1181 buffer.update(cx, |buffer, cx| {
1182 buffer.update_diagnostics(Default::default(), cx);
1183 self.buffer_snapshots.remove(&buffer.remote_id());
1184 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1185 language_server
1186 .notify::<lsp::notification::DidCloseTextDocument>(
1187 lsp::DidCloseTextDocumentParams {
1188 text_document: lsp::TextDocumentIdentifier::new(
1189 lsp::Url::from_file_path(old_path).unwrap(),
1190 ),
1191 },
1192 )
1193 .log_err();
1194 }
1195 });
1196 }
1197
1198 fn on_buffer_event(
1199 &mut self,
1200 buffer: ModelHandle<Buffer>,
1201 event: &BufferEvent,
1202 cx: &mut ModelContext<Self>,
1203 ) -> Option<()> {
1204 match event {
1205 BufferEvent::Operation(operation) => {
1206 let project_id = self.remote_id()?;
1207 let request = self.client.request(proto::UpdateBuffer {
1208 project_id,
1209 buffer_id: buffer.read(cx).remote_id(),
1210 operations: vec![language::proto::serialize_operation(&operation)],
1211 });
1212 cx.background().spawn(request).detach_and_log_err(cx);
1213 }
1214 BufferEvent::Edited { .. } => {
1215 let (_, language_server) = self
1216 .language_server_for_buffer(buffer.read(cx), cx)?
1217 .clone();
1218 let buffer = buffer.read(cx);
1219 let file = File::from_dyn(buffer.file())?;
1220 let abs_path = file.as_local()?.abs_path(cx);
1221 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1222 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1223 let (version, prev_snapshot) = buffer_snapshots.last()?;
1224 let next_snapshot = buffer.text_snapshot();
1225 let next_version = version + 1;
1226
1227 let content_changes = buffer
1228 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1229 .map(|edit| {
1230 let edit_start = edit.new.start.0;
1231 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1232 let new_text = next_snapshot
1233 .text_for_range(edit.new.start.1..edit.new.end.1)
1234 .collect();
1235 lsp::TextDocumentContentChangeEvent {
1236 range: Some(lsp::Range::new(
1237 point_to_lsp(edit_start),
1238 point_to_lsp(edit_end),
1239 )),
1240 range_length: None,
1241 text: new_text,
1242 }
1243 })
1244 .collect();
1245
1246 buffer_snapshots.push((next_version, next_snapshot));
1247
1248 language_server
1249 .notify::<lsp::notification::DidChangeTextDocument>(
1250 lsp::DidChangeTextDocumentParams {
1251 text_document: lsp::VersionedTextDocumentIdentifier::new(
1252 uri,
1253 next_version,
1254 ),
1255 content_changes,
1256 },
1257 )
1258 .log_err();
1259 }
1260 BufferEvent::Saved => {
1261 let file = File::from_dyn(buffer.read(cx).file())?;
1262 let worktree_id = file.worktree_id(cx);
1263 let abs_path = file.as_local()?.abs_path(cx);
1264 let text_document = lsp::TextDocumentIdentifier {
1265 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1266 };
1267
1268 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1269 server
1270 .notify::<lsp::notification::DidSaveTextDocument>(
1271 lsp::DidSaveTextDocumentParams {
1272 text_document: text_document.clone(),
1273 text: None,
1274 },
1275 )
1276 .log_err();
1277 }
1278 }
1279 _ => {}
1280 }
1281
1282 None
1283 }
1284
1285 fn language_servers_for_worktree(
1286 &self,
1287 worktree_id: WorktreeId,
1288 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1289 self.language_servers.iter().filter_map(
1290 move |((language_server_worktree_id, _), server)| {
1291 if *language_server_worktree_id == worktree_id {
1292 Some(server)
1293 } else {
1294 None
1295 }
1296 },
1297 )
1298 }
1299
1300 fn assign_language_to_buffer(
1301 &mut self,
1302 buffer: &ModelHandle<Buffer>,
1303 cx: &mut ModelContext<Self>,
1304 ) -> Option<()> {
1305 // If the buffer has a language, set it and start the language server if we haven't already.
1306 let full_path = buffer.read(cx).file()?.full_path(cx);
1307 let language = self.languages.select_language(&full_path)?;
1308 buffer.update(cx, |buffer, cx| {
1309 buffer.set_language(Some(language.clone()), cx);
1310 });
1311
1312 let file = File::from_dyn(buffer.read(cx).file())?;
1313 let worktree = file.worktree.read(cx).as_local()?;
1314 let worktree_id = worktree.id();
1315 let worktree_abs_path = worktree.abs_path().clone();
1316 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1317
1318 None
1319 }
1320
1321 fn start_language_server(
1322 &mut self,
1323 worktree_id: WorktreeId,
1324 worktree_path: Arc<Path>,
1325 language: Arc<Language>,
1326 cx: &mut ModelContext<Self>,
1327 ) {
1328 let adapter = if let Some(adapter) = language.lsp_adapter() {
1329 adapter
1330 } else {
1331 return;
1332 };
1333 let key = (worktree_id, adapter.name());
1334 self.started_language_servers
1335 .entry(key.clone())
1336 .or_insert_with(|| {
1337 let server_id = post_inc(&mut self.next_language_server_id);
1338 let language_server = self.languages.start_language_server(
1339 server_id,
1340 language.clone(),
1341 worktree_path,
1342 self.client.http_client(),
1343 cx,
1344 );
1345 cx.spawn_weak(|this, mut cx| async move {
1346 let language_server = language_server?.await.log_err()?;
1347 let language_server = language_server
1348 .initialize(adapter.initialization_options())
1349 .await
1350 .log_err()?;
1351 let this = this.upgrade(&cx)?;
1352 let disk_based_diagnostics_progress_token =
1353 adapter.disk_based_diagnostics_progress_token();
1354
1355 language_server
1356 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1357 let this = this.downgrade();
1358 let adapter = adapter.clone();
1359 move |params, mut cx| {
1360 if let Some(this) = this.upgrade(&cx) {
1361 this.update(&mut cx, |this, cx| {
1362 this.on_lsp_diagnostics_published(
1363 server_id,
1364 params,
1365 &adapter,
1366 disk_based_diagnostics_progress_token,
1367 cx,
1368 );
1369 });
1370 }
1371 }
1372 })
1373 .detach();
1374
1375 language_server
1376 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1377 let settings = this
1378 .read_with(&cx, |this, _| this.language_server_settings.clone());
1379 move |params, _| {
1380 let settings = settings.lock().clone();
1381 async move {
1382 Ok(params
1383 .items
1384 .into_iter()
1385 .map(|item| {
1386 if let Some(section) = &item.section {
1387 settings
1388 .get(section)
1389 .cloned()
1390 .unwrap_or(serde_json::Value::Null)
1391 } else {
1392 settings.clone()
1393 }
1394 })
1395 .collect())
1396 }
1397 }
1398 })
1399 .detach();
1400
1401 language_server
1402 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1403 let this = this.downgrade();
1404 let adapter = adapter.clone();
1405 let language_server = language_server.clone();
1406 move |params, cx| {
1407 Self::on_lsp_workspace_edit(
1408 this,
1409 params,
1410 server_id,
1411 adapter.clone(),
1412 language_server.clone(),
1413 cx,
1414 )
1415 }
1416 })
1417 .detach();
1418
1419 language_server
1420 .on_notification::<lsp::notification::Progress, _>({
1421 let this = this.downgrade();
1422 move |params, mut cx| {
1423 if let Some(this) = this.upgrade(&cx) {
1424 this.update(&mut cx, |this, cx| {
1425 this.on_lsp_progress(
1426 params,
1427 server_id,
1428 disk_based_diagnostics_progress_token,
1429 cx,
1430 );
1431 });
1432 }
1433 }
1434 })
1435 .detach();
1436
1437 this.update(&mut cx, |this, cx| {
1438 this.language_servers
1439 .insert(key.clone(), (adapter, language_server.clone()));
1440 this.language_server_statuses.insert(
1441 server_id,
1442 LanguageServerStatus {
1443 name: language_server.name().to_string(),
1444 pending_work: Default::default(),
1445 pending_diagnostic_updates: 0,
1446 },
1447 );
1448 language_server
1449 .notify::<lsp::notification::DidChangeConfiguration>(
1450 lsp::DidChangeConfigurationParams {
1451 settings: this.language_server_settings.lock().clone(),
1452 },
1453 )
1454 .ok();
1455
1456 if let Some(project_id) = this.remote_id() {
1457 this.client
1458 .send(proto::StartLanguageServer {
1459 project_id,
1460 server: Some(proto::LanguageServer {
1461 id: server_id as u64,
1462 name: language_server.name().to_string(),
1463 }),
1464 })
1465 .log_err();
1466 }
1467
1468 // Tell the language server about every open buffer in the worktree that matches the language.
1469 for buffer in this.opened_buffers.values() {
1470 if let Some(buffer_handle) = buffer.upgrade(cx) {
1471 let buffer = buffer_handle.read(cx);
1472 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1473 file
1474 } else {
1475 continue;
1476 };
1477 let language = if let Some(language) = buffer.language() {
1478 language
1479 } else {
1480 continue;
1481 };
1482 if file.worktree.read(cx).id() != key.0
1483 || language.lsp_adapter().map(|a| a.name())
1484 != Some(key.1.clone())
1485 {
1486 continue;
1487 }
1488
1489 let file = file.as_local()?;
1490 let versions = this
1491 .buffer_snapshots
1492 .entry(buffer.remote_id())
1493 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1494 let (version, initial_snapshot) = versions.last().unwrap();
1495 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1496 language_server
1497 .notify::<lsp::notification::DidOpenTextDocument>(
1498 lsp::DidOpenTextDocumentParams {
1499 text_document: lsp::TextDocumentItem::new(
1500 uri,
1501 Default::default(),
1502 *version,
1503 initial_snapshot.text(),
1504 ),
1505 },
1506 )
1507 .log_err()?;
1508 buffer_handle.update(cx, |buffer, cx| {
1509 buffer.set_completion_triggers(
1510 language_server
1511 .capabilities()
1512 .completion_provider
1513 .as_ref()
1514 .and_then(|provider| {
1515 provider.trigger_characters.clone()
1516 })
1517 .unwrap_or(Vec::new()),
1518 cx,
1519 )
1520 });
1521 }
1522 }
1523
1524 cx.notify();
1525 Some(())
1526 });
1527
1528 Some(language_server)
1529 })
1530 });
1531 }
1532
1533 pub fn restart_language_servers_for_buffers(
1534 &mut self,
1535 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1536 cx: &mut ModelContext<Self>,
1537 ) -> Option<()> {
1538 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1539 .into_iter()
1540 .filter_map(|buffer| {
1541 let file = File::from_dyn(buffer.read(cx).file())?;
1542 let worktree = file.worktree.read(cx).as_local()?;
1543 let worktree_id = worktree.id();
1544 let worktree_abs_path = worktree.abs_path().clone();
1545 let full_path = file.full_path(cx);
1546 Some((worktree_id, worktree_abs_path, full_path))
1547 })
1548 .collect();
1549 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1550 let language = self.languages.select_language(&full_path)?;
1551 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1552 }
1553
1554 None
1555 }
1556
1557 fn restart_language_server(
1558 &mut self,
1559 worktree_id: WorktreeId,
1560 worktree_path: Arc<Path>,
1561 language: Arc<Language>,
1562 cx: &mut ModelContext<Self>,
1563 ) {
1564 let adapter = if let Some(adapter) = language.lsp_adapter() {
1565 adapter
1566 } else {
1567 return;
1568 };
1569 let key = (worktree_id, adapter.name());
1570 let server_to_shutdown = self.language_servers.remove(&key);
1571 self.started_language_servers.remove(&key);
1572 server_to_shutdown
1573 .as_ref()
1574 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1575 cx.spawn_weak(|this, mut cx| async move {
1576 if let Some(this) = this.upgrade(&cx) {
1577 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1578 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1579 shutdown_task.await;
1580 }
1581 }
1582
1583 this.update(&mut cx, |this, cx| {
1584 this.start_language_server(worktree_id, worktree_path, language, cx);
1585 });
1586 }
1587 })
1588 .detach();
1589 }
1590
1591 fn on_lsp_diagnostics_published(
1592 &mut self,
1593 server_id: usize,
1594 mut params: lsp::PublishDiagnosticsParams,
1595 adapter: &Arc<dyn LspAdapter>,
1596 disk_based_diagnostics_progress_token: Option<&str>,
1597 cx: &mut ModelContext<Self>,
1598 ) {
1599 adapter.process_diagnostics(&mut params);
1600 if disk_based_diagnostics_progress_token.is_none() {
1601 self.disk_based_diagnostics_started(cx);
1602 self.broadcast_language_server_update(
1603 server_id,
1604 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1605 proto::LspDiskBasedDiagnosticsUpdating {},
1606 ),
1607 );
1608 }
1609 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1610 .log_err();
1611 if disk_based_diagnostics_progress_token.is_none() {
1612 self.disk_based_diagnostics_finished(cx);
1613 self.broadcast_language_server_update(
1614 server_id,
1615 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1616 proto::LspDiskBasedDiagnosticsUpdated {},
1617 ),
1618 );
1619 }
1620 }
1621
1622 fn on_lsp_progress(
1623 &mut self,
1624 progress: lsp::ProgressParams,
1625 server_id: usize,
1626 disk_based_diagnostics_progress_token: Option<&str>,
1627 cx: &mut ModelContext<Self>,
1628 ) {
1629 let token = match progress.token {
1630 lsp::NumberOrString::String(token) => token,
1631 lsp::NumberOrString::Number(token) => {
1632 log::info!("skipping numeric progress token {}", token);
1633 return;
1634 }
1635 };
1636 let progress = match progress.value {
1637 lsp::ProgressParamsValue::WorkDone(value) => value,
1638 };
1639 let language_server_status =
1640 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1641 status
1642 } else {
1643 return;
1644 };
1645 match progress {
1646 lsp::WorkDoneProgress::Begin(_) => {
1647 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1648 language_server_status.pending_diagnostic_updates += 1;
1649 if language_server_status.pending_diagnostic_updates == 1 {
1650 self.disk_based_diagnostics_started(cx);
1651 self.broadcast_language_server_update(
1652 server_id,
1653 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1654 proto::LspDiskBasedDiagnosticsUpdating {},
1655 ),
1656 );
1657 }
1658 } else {
1659 self.on_lsp_work_start(server_id, token.clone(), cx);
1660 self.broadcast_language_server_update(
1661 server_id,
1662 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1663 token,
1664 }),
1665 );
1666 }
1667 }
1668 lsp::WorkDoneProgress::Report(report) => {
1669 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1670 self.on_lsp_work_progress(
1671 server_id,
1672 token.clone(),
1673 LanguageServerProgress {
1674 message: report.message.clone(),
1675 percentage: report.percentage.map(|p| p as usize),
1676 last_update_at: Instant::now(),
1677 },
1678 cx,
1679 );
1680 self.broadcast_language_server_update(
1681 server_id,
1682 proto::update_language_server::Variant::WorkProgress(
1683 proto::LspWorkProgress {
1684 token,
1685 message: report.message,
1686 percentage: report.percentage.map(|p| p as u32),
1687 },
1688 ),
1689 );
1690 }
1691 }
1692 lsp::WorkDoneProgress::End(_) => {
1693 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1694 language_server_status.pending_diagnostic_updates -= 1;
1695 if language_server_status.pending_diagnostic_updates == 0 {
1696 self.disk_based_diagnostics_finished(cx);
1697 self.broadcast_language_server_update(
1698 server_id,
1699 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1700 proto::LspDiskBasedDiagnosticsUpdated {},
1701 ),
1702 );
1703 }
1704 } else {
1705 self.on_lsp_work_end(server_id, token.clone(), cx);
1706 self.broadcast_language_server_update(
1707 server_id,
1708 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1709 token,
1710 }),
1711 );
1712 }
1713 }
1714 }
1715 }
1716
1717 fn on_lsp_work_start(
1718 &mut self,
1719 language_server_id: usize,
1720 token: String,
1721 cx: &mut ModelContext<Self>,
1722 ) {
1723 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1724 status.pending_work.insert(
1725 token,
1726 LanguageServerProgress {
1727 message: None,
1728 percentage: None,
1729 last_update_at: Instant::now(),
1730 },
1731 );
1732 cx.notify();
1733 }
1734 }
1735
1736 fn on_lsp_work_progress(
1737 &mut self,
1738 language_server_id: usize,
1739 token: String,
1740 progress: LanguageServerProgress,
1741 cx: &mut ModelContext<Self>,
1742 ) {
1743 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1744 status.pending_work.insert(token, progress);
1745 cx.notify();
1746 }
1747 }
1748
1749 fn on_lsp_work_end(
1750 &mut self,
1751 language_server_id: usize,
1752 token: String,
1753 cx: &mut ModelContext<Self>,
1754 ) {
1755 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1756 status.pending_work.remove(&token);
1757 cx.notify();
1758 }
1759 }
1760
1761 async fn on_lsp_workspace_edit(
1762 this: WeakModelHandle<Self>,
1763 params: lsp::ApplyWorkspaceEditParams,
1764 server_id: usize,
1765 adapter: Arc<dyn LspAdapter>,
1766 language_server: Arc<LanguageServer>,
1767 mut cx: AsyncAppContext,
1768 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1769 let this = this
1770 .upgrade(&cx)
1771 .ok_or_else(|| anyhow!("project project closed"))?;
1772 let transaction = Self::deserialize_workspace_edit(
1773 this.clone(),
1774 params.edit,
1775 true,
1776 adapter.clone(),
1777 language_server.clone(),
1778 &mut cx,
1779 )
1780 .await
1781 .log_err();
1782 this.update(&mut cx, |this, _| {
1783 if let Some(transaction) = transaction {
1784 this.last_workspace_edits_by_language_server
1785 .insert(server_id, transaction);
1786 }
1787 });
1788 Ok(lsp::ApplyWorkspaceEditResponse {
1789 applied: true,
1790 failed_change: None,
1791 failure_reason: None,
1792 })
1793 }
1794
1795 fn broadcast_language_server_update(
1796 &self,
1797 language_server_id: usize,
1798 event: proto::update_language_server::Variant,
1799 ) {
1800 if let Some(project_id) = self.remote_id() {
1801 self.client
1802 .send(proto::UpdateLanguageServer {
1803 project_id,
1804 language_server_id: language_server_id as u64,
1805 variant: Some(event),
1806 })
1807 .log_err();
1808 }
1809 }
1810
1811 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1812 for (_, server) in self.language_servers.values() {
1813 server
1814 .notify::<lsp::notification::DidChangeConfiguration>(
1815 lsp::DidChangeConfigurationParams {
1816 settings: settings.clone(),
1817 },
1818 )
1819 .ok();
1820 }
1821 *self.language_server_settings.lock() = settings;
1822 }
1823
1824 pub fn language_server_statuses(
1825 &self,
1826 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1827 self.language_server_statuses.values()
1828 }
1829
1830 pub fn update_diagnostics(
1831 &mut self,
1832 params: lsp::PublishDiagnosticsParams,
1833 disk_based_sources: &[&str],
1834 cx: &mut ModelContext<Self>,
1835 ) -> Result<()> {
1836 let abs_path = params
1837 .uri
1838 .to_file_path()
1839 .map_err(|_| anyhow!("URI is not a file"))?;
1840 let mut next_group_id = 0;
1841 let mut diagnostics = Vec::default();
1842 let mut primary_diagnostic_group_ids = HashMap::default();
1843 let mut sources_by_group_id = HashMap::default();
1844 let mut supporting_diagnostics = HashMap::default();
1845 for diagnostic in ¶ms.diagnostics {
1846 let source = diagnostic.source.as_ref();
1847 let code = diagnostic.code.as_ref().map(|code| match code {
1848 lsp::NumberOrString::Number(code) => code.to_string(),
1849 lsp::NumberOrString::String(code) => code.clone(),
1850 });
1851 let range = range_from_lsp(diagnostic.range);
1852 let is_supporting = diagnostic
1853 .related_information
1854 .as_ref()
1855 .map_or(false, |infos| {
1856 infos.iter().any(|info| {
1857 primary_diagnostic_group_ids.contains_key(&(
1858 source,
1859 code.clone(),
1860 range_from_lsp(info.location.range),
1861 ))
1862 })
1863 });
1864
1865 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1866 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1867 });
1868
1869 if is_supporting {
1870 supporting_diagnostics.insert(
1871 (source, code.clone(), range),
1872 (diagnostic.severity, is_unnecessary),
1873 );
1874 } else {
1875 let group_id = post_inc(&mut next_group_id);
1876 let is_disk_based = source.map_or(false, |source| {
1877 disk_based_sources.contains(&source.as_str())
1878 });
1879
1880 sources_by_group_id.insert(group_id, source);
1881 primary_diagnostic_group_ids
1882 .insert((source, code.clone(), range.clone()), group_id);
1883
1884 diagnostics.push(DiagnosticEntry {
1885 range,
1886 diagnostic: Diagnostic {
1887 code: code.clone(),
1888 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1889 message: diagnostic.message.clone(),
1890 group_id,
1891 is_primary: true,
1892 is_valid: true,
1893 is_disk_based,
1894 is_unnecessary,
1895 },
1896 });
1897 if let Some(infos) = &diagnostic.related_information {
1898 for info in infos {
1899 if info.location.uri == params.uri && !info.message.is_empty() {
1900 let range = range_from_lsp(info.location.range);
1901 diagnostics.push(DiagnosticEntry {
1902 range,
1903 diagnostic: Diagnostic {
1904 code: code.clone(),
1905 severity: DiagnosticSeverity::INFORMATION,
1906 message: info.message.clone(),
1907 group_id,
1908 is_primary: false,
1909 is_valid: true,
1910 is_disk_based,
1911 is_unnecessary: false,
1912 },
1913 });
1914 }
1915 }
1916 }
1917 }
1918 }
1919
1920 for entry in &mut diagnostics {
1921 let diagnostic = &mut entry.diagnostic;
1922 if !diagnostic.is_primary {
1923 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1924 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1925 source,
1926 diagnostic.code.clone(),
1927 entry.range.clone(),
1928 )) {
1929 if let Some(severity) = severity {
1930 diagnostic.severity = severity;
1931 }
1932 diagnostic.is_unnecessary = is_unnecessary;
1933 }
1934 }
1935 }
1936
1937 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1938 Ok(())
1939 }
1940
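    /// Stores the given diagnostics on the worktree containing `abs_path` and, if the
    /// file is currently open, refreshes the corresponding buffer's diagnostics too.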
1941 pub fn update_diagnostic_entries(
1942 &mut self,
1943 abs_path: PathBuf,
1944 version: Option<i32>,
1945 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1946 cx: &mut ModelContext<Project>,
1947 ) -> Result<(), anyhow::Error> {
1948 let (worktree, relative_path) = self
1949 .find_local_worktree(&abs_path, cx)
1950 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1951 if !worktree.read(cx).is_visible() {
1952 return Ok(());
1953 }
1954
1955 let project_path = ProjectPath {
1956 worktree_id: worktree.read(cx).id(),
1957 path: relative_path.into(),
1958 };
1959
1960 for buffer in self.opened_buffers.values() {
1961 if let Some(buffer) = buffer.upgrade(cx) {
1962 if buffer
1963 .read(cx)
1964 .file()
1965 .map_or(false, |file| *file.path() == project_path.path)
1966 {
1967 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1968 break;
1969 }
1970 }
1971 }
1972 worktree.update(cx, |worktree, cx| {
1973 worktree
1974 .as_local_mut()
1975 .ok_or_else(|| anyhow!("not a local worktree"))?
1976 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1977 })?;
1978 cx.emit(Event::DiagnosticsUpdated(project_path));
1979 Ok(())
1980 }
1981
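    /// Applies diagnostics to an open buffer: entries are sorted, ranges of disk-based
    /// diagnostics are remapped through any unsaved edits, and all ranges are clipped
    /// to valid positions in the snapshot that matches the reported LSP version.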
1982 fn update_buffer_diagnostics(
1983 &mut self,
1984 buffer: &ModelHandle<Buffer>,
1985 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1986 version: Option<i32>,
1987 cx: &mut ModelContext<Self>,
1988 ) -> Result<()> {
1989 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1990 Ordering::Equal
1991 .then_with(|| b.is_primary.cmp(&a.is_primary))
1992 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1993 .then_with(|| a.severity.cmp(&b.severity))
1994 .then_with(|| a.message.cmp(&b.message))
1995 }
1996
1997 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1998
1999 diagnostics.sort_unstable_by(|a, b| {
2000 Ordering::Equal
2001 .then_with(|| a.range.start.cmp(&b.range.start))
2002 .then_with(|| b.range.end.cmp(&a.range.end))
2003 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2004 });
2005
2006 let mut sanitized_diagnostics = Vec::new();
2007 let edits_since_save = Patch::new(
2008 snapshot
2009 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2010 .collect(),
2011 );
2012 for entry in diagnostics {
2013 let start;
2014 let end;
2015 if entry.diagnostic.is_disk_based {
2016 // Some diagnostics are based on files on disk instead of buffers'
2017 // current contents. Adjust these diagnostics' ranges to reflect
2018 // any unsaved edits.
2019 start = edits_since_save.old_to_new(entry.range.start);
2020 end = edits_since_save.old_to_new(entry.range.end);
2021 } else {
2022 start = entry.range.start;
2023 end = entry.range.end;
2024 }
2025
2026 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2027 ..snapshot.clip_point_utf16(end, Bias::Right);
2028
2029 // Expand empty ranges by one character
2030 if range.start == range.end {
2031 range.end.column += 1;
2032 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2033 if range.start == range.end && range.end.column > 0 {
2034 range.start.column -= 1;
2035 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2036 }
2037 }
2038
2039 sanitized_diagnostics.push(DiagnosticEntry {
2040 range,
2041 diagnostic: entry.diagnostic,
2042 });
2043 }
2044 drop(edits_since_save);
2045
2046 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2047 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2048 Ok(())
2049 }
2050
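    /// Reloads the given buffers from disk if they are dirty. Buffers belonging to
    /// remote files are reloaded by the host over RPC, and all resulting transactions
    /// are collected into a single `ProjectTransaction`.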
2051 pub fn reload_buffers(
2052 &self,
2053 buffers: HashSet<ModelHandle<Buffer>>,
2054 push_to_history: bool,
2055 cx: &mut ModelContext<Self>,
2056 ) -> Task<Result<ProjectTransaction>> {
2057 let mut local_buffers = Vec::new();
2058 let mut remote_buffers = None;
2059 for buffer_handle in buffers {
2060 let buffer = buffer_handle.read(cx);
2061 if buffer.is_dirty() {
2062 if let Some(file) = File::from_dyn(buffer.file()) {
2063 if file.is_local() {
2064 local_buffers.push(buffer_handle);
2065 } else {
2066 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2067 }
2068 }
2069 }
2070 }
2071
2072 let remote_buffers = self.remote_id().zip(remote_buffers);
2073 let client = self.client.clone();
2074
2075 cx.spawn(|this, mut cx| async move {
2076 let mut project_transaction = ProjectTransaction::default();
2077
2078 if let Some((project_id, remote_buffers)) = remote_buffers {
2079 let response = client
2080 .request(proto::ReloadBuffers {
2081 project_id,
2082 buffer_ids: remote_buffers
2083 .iter()
2084 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2085 .collect(),
2086 })
2087 .await?
2088 .transaction
2089 .ok_or_else(|| anyhow!("missing transaction"))?;
2090 project_transaction = this
2091 .update(&mut cx, |this, cx| {
2092 this.deserialize_project_transaction(response, push_to_history, cx)
2093 })
2094 .await?;
2095 }
2096
2097 for buffer in local_buffers {
2098 let transaction = buffer
2099 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2100 .await?;
2101 buffer.update(&mut cx, |buffer, cx| {
2102 if let Some(transaction) = transaction {
2103 if !push_to_history {
2104 buffer.forget_transaction(transaction.id);
2105 }
2106 project_transaction.0.insert(cx.handle(), transaction);
2107 }
2108 });
2109 }
2110
2111 Ok(project_transaction)
2112 })
2113 }
2114
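    /// Formats the given buffers. Local buffers are formatted through their language
    /// server, preferring whole-document formatting and falling back to range
    /// formatting over the entire buffer; remote buffers are formatted by the host.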
2115 pub fn format(
2116 &self,
2117 buffers: HashSet<ModelHandle<Buffer>>,
2118 push_to_history: bool,
2119 cx: &mut ModelContext<Project>,
2120 ) -> Task<Result<ProjectTransaction>> {
2121 let mut local_buffers = Vec::new();
2122 let mut remote_buffers = None;
2123 for buffer_handle in buffers {
2124 let buffer = buffer_handle.read(cx);
2125 if let Some(file) = File::from_dyn(buffer.file()) {
2126 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2127 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2128 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2129 }
2130 } else {
2131 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2132 }
2133 } else {
2134 return Task::ready(Ok(Default::default()));
2135 }
2136 }
2137
2138 let remote_buffers = self.remote_id().zip(remote_buffers);
2139 let client = self.client.clone();
2140
2141 cx.spawn(|this, mut cx| async move {
2142 let mut project_transaction = ProjectTransaction::default();
2143
2144 if let Some((project_id, remote_buffers)) = remote_buffers {
2145 let response = client
2146 .request(proto::FormatBuffers {
2147 project_id,
2148 buffer_ids: remote_buffers
2149 .iter()
2150 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2151 .collect(),
2152 })
2153 .await?
2154 .transaction
2155 .ok_or_else(|| anyhow!("missing transaction"))?;
2156 project_transaction = this
2157 .update(&mut cx, |this, cx| {
2158 this.deserialize_project_transaction(response, push_to_history, cx)
2159 })
2160 .await?;
2161 }
2162
2163 for (buffer, buffer_abs_path, language_server) in local_buffers {
2164 let text_document = lsp::TextDocumentIdentifier::new(
2165 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2166 );
2167 let capabilities = &language_server.capabilities();
2168 let tab_size = cx.update(|cx| {
2169 let language_name = buffer.read(cx).language().map(|language| language.name());
2170 cx.global::<Settings>().tab_size(language_name.as_deref())
2171 });
2172 let lsp_edits = if capabilities
2173 .document_formatting_provider
2174 .as_ref()
2175 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2176 {
2177 language_server
2178 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2179 text_document,
2180 options: lsp::FormattingOptions {
2181 tab_size,
2182 insert_spaces: true,
2183 insert_final_newline: Some(true),
2184 ..Default::default()
2185 },
2186 work_done_progress_params: Default::default(),
2187 })
2188 .await?
2189 } else if capabilities
2190 .document_range_formatting_provider
2191 .as_ref()
2192 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2193 {
2194 let buffer_start = lsp::Position::new(0, 0);
2195 let buffer_end =
2196 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2197 language_server
2198 .request::<lsp::request::RangeFormatting>(
2199 lsp::DocumentRangeFormattingParams {
2200 text_document,
2201 range: lsp::Range::new(buffer_start, buffer_end),
2202 options: lsp::FormattingOptions {
                                    tab_size,
2204 insert_spaces: true,
2205 insert_final_newline: Some(true),
2206 ..Default::default()
2207 },
2208 work_done_progress_params: Default::default(),
2209 },
2210 )
2211 .await?
2212 } else {
2213 continue;
2214 };
2215
2216 if let Some(lsp_edits) = lsp_edits {
2217 let edits = this
2218 .update(&mut cx, |this, cx| {
2219 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2220 })
2221 .await?;
2222 buffer.update(&mut cx, |buffer, cx| {
2223 buffer.finalize_last_transaction();
2224 buffer.start_transaction();
2225 for (range, text) in edits {
2226 buffer.edit([range], text, cx);
2227 }
2228 if buffer.end_transaction(cx).is_some() {
2229 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2230 if !push_to_history {
2231 buffer.forget_transaction(transaction.id);
2232 }
2233 project_transaction.0.insert(cx.handle(), transaction);
2234 }
2235 });
2236 }
2237 }
2238
2239 Ok(project_transaction)
2240 })
2241 }
2242
2243 pub fn definition<T: ToPointUtf16>(
2244 &self,
2245 buffer: &ModelHandle<Buffer>,
2246 position: T,
2247 cx: &mut ModelContext<Self>,
2248 ) -> Task<Result<Vec<Location>>> {
2249 let position = position.to_point_utf16(buffer.read(cx));
2250 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2251 }
2252
2253 pub fn references<T: ToPointUtf16>(
2254 &self,
2255 buffer: &ModelHandle<Buffer>,
2256 position: T,
2257 cx: &mut ModelContext<Self>,
2258 ) -> Task<Result<Vec<Location>>> {
2259 let position = position.to_point_utf16(buffer.read(cx));
2260 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2261 }
2262
2263 pub fn document_highlights<T: ToPointUtf16>(
2264 &self,
2265 buffer: &ModelHandle<Buffer>,
2266 position: T,
2267 cx: &mut ModelContext<Self>,
2268 ) -> Task<Result<Vec<DocumentHighlight>>> {
2269 let position = position.to_point_utf16(buffer.read(cx));
2270
2271 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2272 }
2273
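    /// Searches for workspace symbols matching `query`. On a local project the request
    /// is sent to every running language server and the results are merged; on a
    /// remote project the query is forwarded to the host.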
2274 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2275 if self.is_local() {
2276 let mut language_servers = HashMap::default();
2277 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2278 if let Some(worktree) = self
2279 .worktree_for_id(*worktree_id, cx)
2280 .and_then(|worktree| worktree.read(cx).as_local())
2281 {
2282 language_servers
2283 .entry(Arc::as_ptr(language_server))
2284 .or_insert((
2285 lsp_adapter.clone(),
2286 language_server.clone(),
2287 *worktree_id,
2288 worktree.abs_path().clone(),
2289 ));
2290 }
2291 }
2292
2293 let mut requests = Vec::new();
2294 for (_, language_server, _, _) in language_servers.values() {
2295 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2296 lsp::WorkspaceSymbolParams {
2297 query: query.to_string(),
2298 ..Default::default()
2299 },
2300 ));
2301 }
2302
2303 cx.spawn_weak(|this, cx| async move {
2304 let responses = futures::future::try_join_all(requests).await?;
2305
2306 let mut symbols = Vec::new();
2307 if let Some(this) = this.upgrade(&cx) {
2308 this.read_with(&cx, |this, cx| {
2309 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2310 language_servers.into_values().zip(responses)
2311 {
2312 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2313 |lsp_symbol| {
2314 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2315 let mut worktree_id = source_worktree_id;
2316 let path;
2317 if let Some((worktree, rel_path)) =
2318 this.find_local_worktree(&abs_path, cx)
2319 {
2320 worktree_id = worktree.read(cx).id();
2321 path = rel_path;
2322 } else {
2323 path = relativize_path(&worktree_abs_path, &abs_path);
2324 }
2325
2326 let label = this
2327 .languages
2328 .select_language(&path)
2329 .and_then(|language| {
2330 language
2331 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2332 })
2333 .unwrap_or_else(|| {
2334 CodeLabel::plain(lsp_symbol.name.clone(), None)
2335 });
2336 let signature = this.symbol_signature(worktree_id, &path);
2337
2338 Some(Symbol {
2339 source_worktree_id,
2340 worktree_id,
2341 language_server_name: adapter.name(),
2342 name: lsp_symbol.name,
2343 kind: lsp_symbol.kind,
2344 label,
2345 path,
2346 range: range_from_lsp(lsp_symbol.location.range),
2347 signature,
2348 })
2349 },
2350 ));
2351 }
2352 })
2353 }
2354
2355 Ok(symbols)
2356 })
2357 } else if let Some(project_id) = self.remote_id() {
2358 let request = self.client.request(proto::GetProjectSymbols {
2359 project_id,
2360 query: query.to_string(),
2361 });
2362 cx.spawn_weak(|this, cx| async move {
2363 let response = request.await?;
2364 let mut symbols = Vec::new();
2365 if let Some(this) = this.upgrade(&cx) {
2366 this.read_with(&cx, |this, _| {
2367 symbols.extend(
2368 response
2369 .symbols
2370 .into_iter()
2371 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2372 );
2373 })
2374 }
2375 Ok(symbols)
2376 })
2377 } else {
2378 Task::ready(Ok(Default::default()))
2379 }
2380 }
2381
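    /// Opens the buffer containing the given symbol, resolving its absolute path from
    /// the worktree that produced it and opening it through the symbol's language
    /// server when the project is local.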
2382 pub fn open_buffer_for_symbol(
2383 &mut self,
2384 symbol: &Symbol,
2385 cx: &mut ModelContext<Self>,
2386 ) -> Task<Result<ModelHandle<Buffer>>> {
2387 if self.is_local() {
2388 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2389 symbol.source_worktree_id,
2390 symbol.language_server_name.clone(),
2391 )) {
2392 server.clone()
2393 } else {
2394 return Task::ready(Err(anyhow!(
2395 "language server for worktree and language not found"
2396 )));
2397 };
2398
2399 let worktree_abs_path = if let Some(worktree_abs_path) = self
2400 .worktree_for_id(symbol.worktree_id, cx)
2401 .and_then(|worktree| worktree.read(cx).as_local())
2402 .map(|local_worktree| local_worktree.abs_path())
2403 {
2404 worktree_abs_path
2405 } else {
2406 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2407 };
2408 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2409 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2410 uri
2411 } else {
2412 return Task::ready(Err(anyhow!("invalid symbol path")));
2413 };
2414
2415 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2416 } else if let Some(project_id) = self.remote_id() {
2417 let request = self.client.request(proto::OpenBufferForSymbol {
2418 project_id,
2419 symbol: Some(serialize_symbol(symbol)),
2420 });
2421 cx.spawn(|this, mut cx| async move {
2422 let response = request.await?;
2423 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2424 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2425 .await
2426 })
2427 } else {
2428 Task::ready(Err(anyhow!("project does not have a remote id")))
2429 }
2430 }
2431
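    /// Requests completions at the given position. Local buffers query their language
    /// server directly; remote buffers forward the request to the host and wait for
    /// the buffer to reach the host's reported version before deserializing results.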
2432 pub fn completions<T: ToPointUtf16>(
2433 &self,
2434 source_buffer_handle: &ModelHandle<Buffer>,
2435 position: T,
2436 cx: &mut ModelContext<Self>,
2437 ) -> Task<Result<Vec<Completion>>> {
2438 let source_buffer_handle = source_buffer_handle.clone();
2439 let source_buffer = source_buffer_handle.read(cx);
2440 let buffer_id = source_buffer.remote_id();
2441 let language = source_buffer.language().cloned();
2442 let worktree;
2443 let buffer_abs_path;
2444 if let Some(file) = File::from_dyn(source_buffer.file()) {
2445 worktree = file.worktree.clone();
2446 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2447 } else {
2448 return Task::ready(Ok(Default::default()));
2449 };
2450
2451 let position = position.to_point_utf16(source_buffer);
2452 let anchor = source_buffer.anchor_after(position);
2453
2454 if worktree.read(cx).as_local().is_some() {
2455 let buffer_abs_path = buffer_abs_path.unwrap();
2456 let (_, lang_server) =
2457 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2458 server.clone()
2459 } else {
2460 return Task::ready(Ok(Default::default()));
2461 };
2462
2463 cx.spawn(|_, cx| async move {
2464 let completions = lang_server
2465 .request::<lsp::request::Completion>(lsp::CompletionParams {
2466 text_document_position: lsp::TextDocumentPositionParams::new(
2467 lsp::TextDocumentIdentifier::new(
2468 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2469 ),
2470 point_to_lsp(position),
2471 ),
2472 context: Default::default(),
2473 work_done_progress_params: Default::default(),
2474 partial_result_params: Default::default(),
2475 })
2476 .await
2477 .context("lsp completion request failed")?;
2478
2479 let completions = if let Some(completions) = completions {
2480 match completions {
2481 lsp::CompletionResponse::Array(completions) => completions,
2482 lsp::CompletionResponse::List(list) => list.items,
2483 }
2484 } else {
2485 Default::default()
2486 };
2487
2488 source_buffer_handle.read_with(&cx, |this, _| {
2489 Ok(completions
2490 .into_iter()
2491 .filter_map(|lsp_completion| {
2492 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2493 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2494 (range_from_lsp(edit.range), edit.new_text.clone())
2495 }
2496 None => {
2497 let clipped_position =
2498 this.clip_point_utf16(position, Bias::Left);
2499 if position != clipped_position {
2500 log::info!("completion out of expected range");
2501 return None;
2502 }
2503 (
2504 this.common_prefix_at(
2505 clipped_position,
2506 &lsp_completion.label,
2507 ),
2508 lsp_completion.label.clone(),
2509 )
2510 }
2511 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2512 log::info!("unsupported insert/replace completion");
2513 return None;
2514 }
2515 };
2516
2517 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2518 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2519 if clipped_start == old_range.start && clipped_end == old_range.end {
2520 Some(Completion {
2521 old_range: this.anchor_before(old_range.start)
2522 ..this.anchor_after(old_range.end),
2523 new_text,
2524 label: language
2525 .as_ref()
2526 .and_then(|l| l.label_for_completion(&lsp_completion))
2527 .unwrap_or_else(|| {
2528 CodeLabel::plain(
2529 lsp_completion.label.clone(),
2530 lsp_completion.filter_text.as_deref(),
2531 )
2532 }),
2533 lsp_completion,
2534 })
2535 } else {
2536 log::info!("completion out of expected range");
2537 None
2538 }
2539 })
2540 .collect())
2541 })
2542 })
2543 } else if let Some(project_id) = self.remote_id() {
2544 let rpc = self.client.clone();
2545 let message = proto::GetCompletions {
2546 project_id,
2547 buffer_id,
2548 position: Some(language::proto::serialize_anchor(&anchor)),
2549 version: serialize_version(&source_buffer.version()),
2550 };
2551 cx.spawn_weak(|_, mut cx| async move {
2552 let response = rpc.request(message).await?;
2553
2554 source_buffer_handle
2555 .update(&mut cx, |buffer, _| {
2556 buffer.wait_for_version(deserialize_version(response.version))
2557 })
2558 .await;
2559
2560 response
2561 .completions
2562 .into_iter()
2563 .map(|completion| {
2564 language::proto::deserialize_completion(completion, language.as_ref())
2565 })
2566 .collect()
2567 })
2568 } else {
2569 Task::ready(Ok(Default::default()))
2570 }
2571 }
2572
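    /// Resolves the given completion and applies any additional text edits it carries
    /// (such as automatically inserted imports), returning the resulting transaction
    /// if the buffer was modified.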
2573 pub fn apply_additional_edits_for_completion(
2574 &self,
2575 buffer_handle: ModelHandle<Buffer>,
2576 completion: Completion,
2577 push_to_history: bool,
2578 cx: &mut ModelContext<Self>,
2579 ) -> Task<Result<Option<Transaction>>> {
2580 let buffer = buffer_handle.read(cx);
2581 let buffer_id = buffer.remote_id();
2582
2583 if self.is_local() {
2584 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2585 {
2586 server.clone()
2587 } else {
2588 return Task::ready(Ok(Default::default()));
2589 };
2590
2591 cx.spawn(|this, mut cx| async move {
2592 let resolved_completion = lang_server
2593 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2594 .await?;
2595 if let Some(edits) = resolved_completion.additional_text_edits {
2596 let edits = this
2597 .update(&mut cx, |this, cx| {
2598 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2599 })
2600 .await?;
2601 buffer_handle.update(&mut cx, |buffer, cx| {
2602 buffer.finalize_last_transaction();
2603 buffer.start_transaction();
2604 for (range, text) in edits {
2605 buffer.edit([range], text, cx);
2606 }
2607 let transaction = if buffer.end_transaction(cx).is_some() {
2608 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2609 if !push_to_history {
2610 buffer.forget_transaction(transaction.id);
2611 }
2612 Some(transaction)
2613 } else {
2614 None
2615 };
2616 Ok(transaction)
2617 })
2618 } else {
2619 Ok(None)
2620 }
2621 })
2622 } else if let Some(project_id) = self.remote_id() {
2623 let client = self.client.clone();
2624 cx.spawn(|_, mut cx| async move {
2625 let response = client
2626 .request(proto::ApplyCompletionAdditionalEdits {
2627 project_id,
2628 buffer_id,
2629 completion: Some(language::proto::serialize_completion(&completion)),
2630 })
2631 .await?;
2632
2633 if let Some(transaction) = response.transaction {
2634 let transaction = language::proto::deserialize_transaction(transaction)?;
2635 buffer_handle
2636 .update(&mut cx, |buffer, _| {
2637 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2638 })
2639 .await;
2640 if push_to_history {
2641 buffer_handle.update(&mut cx, |buffer, _| {
2642 buffer.push_transaction(transaction.clone(), Instant::now());
2643 });
2644 }
2645 Ok(Some(transaction))
2646 } else {
2647 Ok(None)
2648 }
2649 })
2650 } else {
2651 Task::ready(Err(anyhow!("project does not have a remote id")))
2652 }
2653 }
2654
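    /// Requests code actions for the given range, restricted to quickfix, refactor,
    /// refactor-extract, and source actions, passing along the diagnostics that
    /// overlap the range as context.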
2655 pub fn code_actions<T: Clone + ToOffset>(
2656 &self,
2657 buffer_handle: &ModelHandle<Buffer>,
2658 range: Range<T>,
2659 cx: &mut ModelContext<Self>,
2660 ) -> Task<Result<Vec<CodeAction>>> {
2661 let buffer_handle = buffer_handle.clone();
2662 let buffer = buffer_handle.read(cx);
2663 let snapshot = buffer.snapshot();
2664 let relevant_diagnostics = snapshot
2665 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2666 .map(|entry| entry.to_lsp_diagnostic_stub())
2667 .collect();
2668 let buffer_id = buffer.remote_id();
2669 let worktree;
2670 let buffer_abs_path;
2671 if let Some(file) = File::from_dyn(buffer.file()) {
2672 worktree = file.worktree.clone();
2673 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2674 } else {
2675 return Task::ready(Ok(Default::default()));
2676 };
2677 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2678
2679 if worktree.read(cx).as_local().is_some() {
2680 let buffer_abs_path = buffer_abs_path.unwrap();
2681 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2682 {
2683 server.clone()
2684 } else {
2685 return Task::ready(Ok(Default::default()));
2686 };
2687
2688 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2689 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2691 return Ok(Default::default());
2692 }
2693
2694 Ok(lang_server
2695 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2696 text_document: lsp::TextDocumentIdentifier::new(
2697 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2698 ),
2699 range: lsp_range,
2700 work_done_progress_params: Default::default(),
2701 partial_result_params: Default::default(),
2702 context: lsp::CodeActionContext {
2703 diagnostics: relevant_diagnostics,
2704 only: Some(vec![
2705 lsp::CodeActionKind::QUICKFIX,
2706 lsp::CodeActionKind::REFACTOR,
2707 lsp::CodeActionKind::REFACTOR_EXTRACT,
2708 lsp::CodeActionKind::SOURCE,
2709 ]),
2710 },
2711 })
2712 .await?
2713 .unwrap_or_default()
2714 .into_iter()
2715 .filter_map(|entry| {
2716 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2717 Some(CodeAction {
2718 range: range.clone(),
2719 lsp_action,
2720 })
2721 } else {
2722 None
2723 }
2724 })
2725 .collect())
2726 })
2727 } else if let Some(project_id) = self.remote_id() {
2728 let rpc = self.client.clone();
2729 let version = buffer.version();
2730 cx.spawn_weak(|_, mut cx| async move {
2731 let response = rpc
2732 .request(proto::GetCodeActions {
2733 project_id,
2734 buffer_id,
2735 start: Some(language::proto::serialize_anchor(&range.start)),
2736 end: Some(language::proto::serialize_anchor(&range.end)),
2737 version: serialize_version(&version),
2738 })
2739 .await?;
2740
2741 buffer_handle
2742 .update(&mut cx, |buffer, _| {
2743 buffer.wait_for_version(deserialize_version(response.version))
2744 })
2745 .await;
2746
2747 response
2748 .actions
2749 .into_iter()
2750 .map(language::proto::deserialize_code_action)
2751 .collect()
2752 })
2753 } else {
2754 Task::ready(Ok(Default::default()))
2755 }
2756 }
2757
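    /// Applies a code action: the action is resolved via `codeAction/resolve` when the
    /// server attached resolve data (or re-requested otherwise), then its workspace
    /// edit is applied, or its command is executed and any resulting workspace edits
    /// are collected from the language server.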
2758 pub fn apply_code_action(
2759 &self,
2760 buffer_handle: ModelHandle<Buffer>,
2761 mut action: CodeAction,
2762 push_to_history: bool,
2763 cx: &mut ModelContext<Self>,
2764 ) -> Task<Result<ProjectTransaction>> {
2765 if self.is_local() {
2766 let buffer = buffer_handle.read(cx);
2767 let (lsp_adapter, lang_server) =
2768 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2769 server.clone()
2770 } else {
2771 return Task::ready(Ok(Default::default()));
2772 };
2773 let range = action.range.to_point_utf16(buffer);
2774
2775 cx.spawn(|this, mut cx| async move {
2776 if let Some(lsp_range) = action
2777 .lsp_action
2778 .data
2779 .as_mut()
2780 .and_then(|d| d.get_mut("codeActionParams"))
2781 .and_then(|d| d.get_mut("range"))
2782 {
2783 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2784 action.lsp_action = lang_server
2785 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2786 .await?;
2787 } else {
2788 let actions = this
2789 .update(&mut cx, |this, cx| {
2790 this.code_actions(&buffer_handle, action.range, cx)
2791 })
2792 .await?;
2793 action.lsp_action = actions
2794 .into_iter()
2795 .find(|a| a.lsp_action.title == action.lsp_action.title)
2796 .ok_or_else(|| anyhow!("code action is outdated"))?
2797 .lsp_action;
2798 }
2799
2800 if let Some(edit) = action.lsp_action.edit {
2801 Self::deserialize_workspace_edit(
2802 this,
2803 edit,
2804 push_to_history,
2805 lsp_adapter,
2806 lang_server,
2807 &mut cx,
2808 )
2809 .await
2810 } else if let Some(command) = action.lsp_action.command {
2811 this.update(&mut cx, |this, _| {
2812 this.last_workspace_edits_by_language_server
2813 .remove(&lang_server.server_id());
2814 });
2815 lang_server
2816 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2817 command: command.command,
2818 arguments: command.arguments.unwrap_or_default(),
2819 ..Default::default()
2820 })
2821 .await?;
2822 Ok(this.update(&mut cx, |this, _| {
2823 this.last_workspace_edits_by_language_server
2824 .remove(&lang_server.server_id())
2825 .unwrap_or_default()
2826 }))
2827 } else {
2828 Ok(ProjectTransaction::default())
2829 }
2830 })
2831 } else if let Some(project_id) = self.remote_id() {
2832 let client = self.client.clone();
2833 let request = proto::ApplyCodeAction {
2834 project_id,
2835 buffer_id: buffer_handle.read(cx).remote_id(),
2836 action: Some(language::proto::serialize_code_action(&action)),
2837 };
2838 cx.spawn(|this, mut cx| async move {
2839 let response = client
2840 .request(request)
2841 .await?
2842 .transaction
2843 .ok_or_else(|| anyhow!("missing transaction"))?;
2844 this.update(&mut cx, |this, cx| {
2845 this.deserialize_project_transaction(response, push_to_history, cx)
2846 })
2847 .await
2848 })
2849 } else {
2850 Task::ready(Err(anyhow!("project does not have a remote id")))
2851 }
2852 }
2853
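    /// Applies an `lsp::WorkspaceEdit` to the project: create, rename, and delete
    /// operations are performed on the file system, text edits are applied to the
    /// affected buffers, and one transaction per edited buffer is returned.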
2854 async fn deserialize_workspace_edit(
2855 this: ModelHandle<Self>,
2856 edit: lsp::WorkspaceEdit,
2857 push_to_history: bool,
2858 lsp_adapter: Arc<dyn LspAdapter>,
2859 language_server: Arc<LanguageServer>,
2860 cx: &mut AsyncAppContext,
2861 ) -> Result<ProjectTransaction> {
2862 let fs = this.read_with(cx, |this, _| this.fs.clone());
2863 let mut operations = Vec::new();
2864 if let Some(document_changes) = edit.document_changes {
2865 match document_changes {
2866 lsp::DocumentChanges::Edits(edits) => {
2867 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2868 }
2869 lsp::DocumentChanges::Operations(ops) => operations = ops,
2870 }
2871 } else if let Some(changes) = edit.changes {
2872 operations.extend(changes.into_iter().map(|(uri, edits)| {
2873 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2874 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2875 uri,
2876 version: None,
2877 },
2878 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2879 })
2880 }));
2881 }
2882
2883 let mut project_transaction = ProjectTransaction::default();
2884 for operation in operations {
2885 match operation {
2886 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2887 let abs_path = op
2888 .uri
2889 .to_file_path()
2890 .map_err(|_| anyhow!("can't convert URI to path"))?;
2891
2892 if let Some(parent_path) = abs_path.parent() {
2893 fs.create_dir(parent_path).await?;
2894 }
                    // A trailing slash in the URI indicates a directory.
                    if op.uri.as_str().ends_with('/') {
2896 fs.create_dir(&abs_path).await?;
2897 } else {
2898 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2899 .await?;
2900 }
2901 }
2902 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2903 let source_abs_path = op
2904 .old_uri
2905 .to_file_path()
2906 .map_err(|_| anyhow!("can't convert URI to path"))?;
2907 let target_abs_path = op
2908 .new_uri
2909 .to_file_path()
2910 .map_err(|_| anyhow!("can't convert URI to path"))?;
2911 fs.rename(
2912 &source_abs_path,
2913 &target_abs_path,
2914 op.options.map(Into::into).unwrap_or_default(),
2915 )
2916 .await?;
2917 }
2918 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2919 let abs_path = op
2920 .uri
2921 .to_file_path()
2922 .map_err(|_| anyhow!("can't convert URI to path"))?;
2923 let options = op.options.map(Into::into).unwrap_or_default();
                    // A trailing slash in the URI indicates a directory.
                    if op.uri.as_str().ends_with('/') {
2925 fs.remove_dir(&abs_path, options).await?;
2926 } else {
2927 fs.remove_file(&abs_path, options).await?;
2928 }
2929 }
2930 lsp::DocumentChangeOperation::Edit(op) => {
2931 let buffer_to_edit = this
2932 .update(cx, |this, cx| {
2933 this.open_local_buffer_via_lsp(
2934 op.text_document.uri,
2935 lsp_adapter.clone(),
2936 language_server.clone(),
2937 cx,
2938 )
2939 })
2940 .await?;
2941
2942 let edits = this
2943 .update(cx, |this, cx| {
2944 let edits = op.edits.into_iter().map(|edit| match edit {
2945 lsp::OneOf::Left(edit) => edit,
2946 lsp::OneOf::Right(edit) => edit.text_edit,
2947 });
2948 this.edits_from_lsp(
2949 &buffer_to_edit,
2950 edits,
2951 op.text_document.version,
2952 cx,
2953 )
2954 })
2955 .await?;
2956
2957 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2958 buffer.finalize_last_transaction();
2959 buffer.start_transaction();
2960 for (range, text) in edits {
2961 buffer.edit([range], text, cx);
2962 }
2963 let transaction = if buffer.end_transaction(cx).is_some() {
2964 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2965 if !push_to_history {
2966 buffer.forget_transaction(transaction.id);
2967 }
2968 Some(transaction)
2969 } else {
2970 None
2971 };
2972
2973 transaction
2974 });
2975 if let Some(transaction) = transaction {
2976 project_transaction.0.insert(buffer_to_edit, transaction);
2977 }
2978 }
2979 }
2980 }
2981
2982 Ok(project_transaction)
2983 }
2984
2985 pub fn prepare_rename<T: ToPointUtf16>(
2986 &self,
2987 buffer: ModelHandle<Buffer>,
2988 position: T,
2989 cx: &mut ModelContext<Self>,
2990 ) -> Task<Result<Option<Range<Anchor>>>> {
2991 let position = position.to_point_utf16(buffer.read(cx));
2992 self.request_lsp(buffer, PrepareRename { position }, cx)
2993 }
2994
2995 pub fn perform_rename<T: ToPointUtf16>(
2996 &self,
2997 buffer: ModelHandle<Buffer>,
2998 position: T,
2999 new_name: String,
3000 push_to_history: bool,
3001 cx: &mut ModelContext<Self>,
3002 ) -> Task<Result<ProjectTransaction>> {
3003 let position = position.to_point_utf16(buffer.read(cx));
3004 self.request_lsp(
3005 buffer,
3006 PerformRename {
3007 position,
3008 new_name,
3009 push_to_history,
3010 },
3011 cx,
3012 )
3013 }
3014
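    /// Searches the project for `query`. Local searches fan out across background
    /// workers: candidate paths are found by scanning files on disk, matching files
    /// are opened as buffers, and each buffer snapshot is then searched for match
    /// ranges. Remote searches are forwarded to the host.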
3015 pub fn search(
3016 &self,
3017 query: SearchQuery,
3018 cx: &mut ModelContext<Self>,
3019 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3020 if self.is_local() {
3021 let snapshots = self
3022 .visible_worktrees(cx)
3023 .filter_map(|tree| {
3024 let tree = tree.read(cx).as_local()?;
3025 Some(tree.snapshot())
3026 })
3027 .collect::<Vec<_>>();
3028
3029 let background = cx.background().clone();
3030 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3031 if path_count == 0 {
3032 return Task::ready(Ok(Default::default()));
3033 }
3034 let workers = background.num_cpus().min(path_count);
3035 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3036 cx.background()
3037 .spawn({
3038 let fs = self.fs.clone();
3039 let background = cx.background().clone();
3040 let query = query.clone();
3041 async move {
3042 let fs = &fs;
3043 let query = &query;
3044 let matching_paths_tx = &matching_paths_tx;
3045 let paths_per_worker = (path_count + workers - 1) / workers;
3046 let snapshots = &snapshots;
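                        // Partition the visible files evenly across workers; each worker
                        // scans its assigned slice of the concatenated worktree snapshots.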
3047 background
3048 .scoped(|scope| {
3049 for worker_ix in 0..workers {
3050 let worker_start_ix = worker_ix * paths_per_worker;
3051 let worker_end_ix = worker_start_ix + paths_per_worker;
3052 scope.spawn(async move {
3053 let mut snapshot_start_ix = 0;
3054 let mut abs_path = PathBuf::new();
3055 for snapshot in snapshots {
3056 let snapshot_end_ix =
3057 snapshot_start_ix + snapshot.visible_file_count();
3058 if worker_end_ix <= snapshot_start_ix {
3059 break;
3060 } else if worker_start_ix > snapshot_end_ix {
3061 snapshot_start_ix = snapshot_end_ix;
3062 continue;
3063 } else {
3064 let start_in_snapshot = worker_start_ix
3065 .saturating_sub(snapshot_start_ix);
3066 let end_in_snapshot =
3067 cmp::min(worker_end_ix, snapshot_end_ix)
3068 - snapshot_start_ix;
3069
3070 for entry in snapshot
3071 .files(false, start_in_snapshot)
3072 .take(end_in_snapshot - start_in_snapshot)
3073 {
3074 if matching_paths_tx.is_closed() {
3075 break;
3076 }
3077
3078 abs_path.clear();
3079 abs_path.push(&snapshot.abs_path());
3080 abs_path.push(&entry.path);
3081 let matches = if let Some(file) =
3082 fs.open_sync(&abs_path).await.log_err()
3083 {
3084 query.detect(file).unwrap_or(false)
3085 } else {
3086 false
3087 };
3088
3089 if matches {
3090 let project_path =
3091 (snapshot.id(), entry.path.clone());
3092 if matching_paths_tx
3093 .send(project_path)
3094 .await
3095 .is_err()
3096 {
3097 break;
3098 }
3099 }
3100 }
3101
3102 snapshot_start_ix = snapshot_end_ix;
3103 }
3104 }
3105 });
3106 }
3107 })
3108 .await;
3109 }
3110 })
3111 .detach();
3112
3113 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3114 let open_buffers = self
3115 .opened_buffers
3116 .values()
3117 .filter_map(|b| b.upgrade(cx))
3118 .collect::<HashSet<_>>();
3119 cx.spawn(|this, cx| async move {
3120 for buffer in &open_buffers {
3121 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3122 buffers_tx.send((buffer.clone(), snapshot)).await?;
3123 }
3124
3125 let open_buffers = Rc::new(RefCell::new(open_buffers));
3126 while let Some(project_path) = matching_paths_rx.next().await {
3127 if buffers_tx.is_closed() {
3128 break;
3129 }
3130
3131 let this = this.clone();
3132 let open_buffers = open_buffers.clone();
3133 let buffers_tx = buffers_tx.clone();
3134 cx.spawn(|mut cx| async move {
3135 if let Some(buffer) = this
3136 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3137 .await
3138 .log_err()
3139 {
3140 if open_buffers.borrow_mut().insert(buffer.clone()) {
3141 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3142 buffers_tx.send((buffer, snapshot)).await?;
3143 }
3144 }
3145
3146 Ok::<_, anyhow::Error>(())
3147 })
3148 .detach();
3149 }
3150
3151 Ok::<_, anyhow::Error>(())
3152 })
3153 .detach_and_log_err(cx);
3154
3155 let background = cx.background().clone();
3156 cx.background().spawn(async move {
3157 let query = &query;
3158 let mut matched_buffers = Vec::new();
3159 for _ in 0..workers {
3160 matched_buffers.push(HashMap::default());
3161 }
3162 background
3163 .scoped(|scope| {
3164 for worker_matched_buffers in matched_buffers.iter_mut() {
3165 let mut buffers_rx = buffers_rx.clone();
3166 scope.spawn(async move {
3167 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3168 let buffer_matches = query
3169 .search(snapshot.as_rope())
3170 .await
3171 .iter()
3172 .map(|range| {
3173 snapshot.anchor_before(range.start)
3174 ..snapshot.anchor_after(range.end)
3175 })
3176 .collect::<Vec<_>>();
3177 if !buffer_matches.is_empty() {
3178 worker_matched_buffers
3179 .insert(buffer.clone(), buffer_matches);
3180 }
3181 }
3182 });
3183 }
3184 })
3185 .await;
3186 Ok(matched_buffers.into_iter().flatten().collect())
3187 })
3188 } else if let Some(project_id) = self.remote_id() {
3189 let request = self.client.request(query.to_proto(project_id));
3190 cx.spawn(|this, mut cx| async move {
3191 let response = request.await?;
3192 let mut result = HashMap::default();
3193 for location in response.locations {
3194 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3195 let target_buffer = this
3196 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3197 .await?;
3198 let start = location
3199 .start
3200 .and_then(deserialize_anchor)
3201 .ok_or_else(|| anyhow!("missing target start"))?;
3202 let end = location
3203 .end
3204 .and_then(deserialize_anchor)
3205 .ok_or_else(|| anyhow!("missing target end"))?;
3206 result
3207 .entry(target_buffer)
3208 .or_insert(Vec::new())
3209 .push(start..end)
3210 }
3211 Ok(result)
3212 })
3213 } else {
3214 Task::ready(Ok(Default::default()))
3215 }
3216 }
3217
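    /// Runs an `LspCommand` against the buffer: on a local project the request goes to
    /// the buffer's language server (skipped if the server lacks the required
    /// capability), and on a remote project it is forwarded to the host over RPC.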
3218 fn request_lsp<R: LspCommand>(
3219 &self,
3220 buffer_handle: ModelHandle<Buffer>,
3221 request: R,
3222 cx: &mut ModelContext<Self>,
3223 ) -> Task<Result<R::Response>>
3224 where
3225 <R::LspRequest as lsp::request::Request>::Result: Send,
3226 {
3227 let buffer = buffer_handle.read(cx);
3228 if self.is_local() {
3229 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3230 if let Some((file, (_, language_server))) =
3231 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3232 {
3233 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3234 return cx.spawn(|this, cx| async move {
3235 if !request.check_capabilities(&language_server.capabilities()) {
3236 return Ok(Default::default());
3237 }
3238
3239 let response = language_server
3240 .request::<R::LspRequest>(lsp_params)
3241 .await
3242 .context("lsp request failed")?;
3243 request
3244 .response_from_lsp(response, this, buffer_handle, cx)
3245 .await
3246 });
3247 }
3248 } else if let Some(project_id) = self.remote_id() {
3249 let rpc = self.client.clone();
3250 let message = request.to_proto(project_id, buffer);
3251 return cx.spawn(|this, cx| async move {
3252 let response = rpc.request(message).await?;
3253 request
3254 .response_from_proto(response, this, buffer_handle, cx)
3255 .await
3256 });
3257 }
3258 Task::ready(Ok(Default::default()))
3259 }
3260
3261 pub fn find_or_create_local_worktree(
3262 &mut self,
3263 abs_path: impl AsRef<Path>,
3264 visible: bool,
3265 cx: &mut ModelContext<Self>,
3266 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3267 let abs_path = abs_path.as_ref();
3268 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3269 Task::ready(Ok((tree.clone(), relative_path.into())))
3270 } else {
3271 let worktree = self.create_local_worktree(abs_path, visible, cx);
3272 cx.foreground()
3273 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3274 }
3275 }
3276
3277 pub fn find_local_worktree(
3278 &self,
3279 abs_path: &Path,
3280 cx: &AppContext,
3281 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3282 for tree in self.worktrees(cx) {
3283 if let Some(relative_path) = tree
3284 .read(cx)
3285 .as_local()
3286 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3287 {
3288 return Some((tree.clone(), relative_path.into()));
3289 }
3290 }
3291 None
3292 }
3293
3294 pub fn is_shared(&self) -> bool {
3295 match &self.client_state {
3296 ProjectClientState::Local { is_shared, .. } => *is_shared,
3297 ProjectClientState::Remote { .. } => false,
3298 }
3299 }
3300
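    /// Creates a local worktree for `abs_path`, deduplicating concurrent requests for
    /// the same path through `loading_local_worktrees`. If the project has a remote id,
    /// the new worktree is shared or registered with the server accordingly.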
3301 fn create_local_worktree(
3302 &mut self,
3303 abs_path: impl AsRef<Path>,
3304 visible: bool,
3305 cx: &mut ModelContext<Self>,
3306 ) -> Task<Result<ModelHandle<Worktree>>> {
3307 let fs = self.fs.clone();
3308 let client = self.client.clone();
3309 let next_entry_id = self.next_entry_id.clone();
3310 let path: Arc<Path> = abs_path.as_ref().into();
3311 let task = self
3312 .loading_local_worktrees
3313 .entry(path.clone())
3314 .or_insert_with(|| {
3315 cx.spawn(|project, mut cx| {
3316 async move {
3317 let worktree = Worktree::local(
3318 client.clone(),
3319 path.clone(),
3320 visible,
3321 fs,
3322 next_entry_id,
3323 &mut cx,
3324 )
3325 .await;
3326 project.update(&mut cx, |project, _| {
3327 project.loading_local_worktrees.remove(&path);
3328 });
3329 let worktree = worktree?;
3330
3331 let (remote_project_id, is_shared) =
3332 project.update(&mut cx, |project, cx| {
3333 project.add_worktree(&worktree, cx);
3334 (project.remote_id(), project.is_shared())
3335 });
3336
3337 if let Some(project_id) = remote_project_id {
3338 if is_shared {
3339 worktree
3340 .update(&mut cx, |worktree, cx| {
3341 worktree.as_local_mut().unwrap().share(project_id, cx)
3342 })
3343 .await?;
3344 } else {
3345 worktree
3346 .update(&mut cx, |worktree, cx| {
3347 worktree.as_local_mut().unwrap().register(project_id, cx)
3348 })
3349 .await?;
3350 }
3351 }
3352
3353 Ok(worktree)
3354 }
3355 .map_err(|err| Arc::new(err))
3356 })
3357 .shared()
3358 })
3359 .clone();
3360 cx.foreground().spawn(async move {
3361 match task.await {
3362 Ok(worktree) => Ok(worktree),
3363 Err(err) => Err(anyhow!("{}", err)),
3364 }
3365 })
3366 }
3367
3368 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3369 self.worktrees.retain(|worktree| {
3370 worktree
3371 .upgrade(cx)
3372 .map_or(false, |w| w.read(cx).id() != id)
3373 });
3374 cx.notify();
3375 }
3376
3377 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3378 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3379 if worktree.read(cx).is_local() {
3380 cx.subscribe(&worktree, |this, worktree, _, cx| {
3381 this.update_local_worktree_buffers(worktree, cx);
3382 })
3383 .detach();
3384 }
3385
3386 let push_strong_handle = {
3387 let worktree = worktree.read(cx);
3388 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3389 };
3390 if push_strong_handle {
3391 self.worktrees
3392 .push(WorktreeHandle::Strong(worktree.clone()));
3393 } else {
3394 cx.observe_release(&worktree, |this, _, cx| {
3395 this.worktrees
3396 .retain(|worktree| worktree.upgrade(cx).is_some());
3397 cx.notify();
3398 })
3399 .detach();
3400 self.worktrees
3401 .push(WorktreeHandle::Weak(worktree.downgrade()));
3402 }
3403 cx.notify();
3404 }
3405
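    /// Reconciles open buffers with a local worktree's latest snapshot: each buffer's
    /// file is re-resolved by entry id or path, collaborators are notified of the
    /// change, dropped buffers are forgotten, and renamed buffers are re-registered
    /// with their language servers.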
3406 fn update_local_worktree_buffers(
3407 &mut self,
3408 worktree_handle: ModelHandle<Worktree>,
3409 cx: &mut ModelContext<Self>,
3410 ) {
3411 let snapshot = worktree_handle.read(cx).snapshot();
3412 let mut buffers_to_delete = Vec::new();
3413 let mut renamed_buffers = Vec::new();
3414 for (buffer_id, buffer) in &self.opened_buffers {
3415 if let Some(buffer) = buffer.upgrade(cx) {
3416 buffer.update(cx, |buffer, cx| {
3417 if let Some(old_file) = File::from_dyn(buffer.file()) {
3418 if old_file.worktree != worktree_handle {
3419 return;
3420 }
3421
3422 let new_file = if let Some(entry) = old_file
3423 .entry_id
3424 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3425 {
3426 File {
3427 is_local: true,
3428 entry_id: Some(entry.id),
3429 mtime: entry.mtime,
3430 path: entry.path.clone(),
3431 worktree: worktree_handle.clone(),
3432 }
3433 } else if let Some(entry) =
3434 snapshot.entry_for_path(old_file.path().as_ref())
3435 {
3436 File {
3437 is_local: true,
3438 entry_id: Some(entry.id),
3439 mtime: entry.mtime,
3440 path: entry.path.clone(),
3441 worktree: worktree_handle.clone(),
3442 }
3443 } else {
3444 File {
3445 is_local: true,
3446 entry_id: None,
3447 path: old_file.path().clone(),
3448 mtime: old_file.mtime(),
3449 worktree: worktree_handle.clone(),
3450 }
3451 };
3452
3453 let old_path = old_file.abs_path(cx);
3454 if new_file.abs_path(cx) != old_path {
3455 renamed_buffers.push((cx.handle(), old_path));
3456 }
3457
3458 if let Some(project_id) = self.remote_id() {
3459 self.client
3460 .send(proto::UpdateBufferFile {
3461 project_id,
3462 buffer_id: *buffer_id as u64,
3463 file: Some(new_file.to_proto()),
3464 })
3465 .log_err();
3466 }
3467 buffer.file_updated(Box::new(new_file), cx).detach();
3468 }
3469 });
3470 } else {
3471 buffers_to_delete.push(*buffer_id);
3472 }
3473 }
3474
3475 for buffer_id in buffers_to_delete {
3476 self.opened_buffers.remove(&buffer_id);
3477 }
3478
3479 for (buffer, old_path) in renamed_buffers {
3480 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3481 self.assign_language_to_buffer(&buffer, cx);
3482 self.register_buffer_with_language_server(&buffer, cx);
3483 }
3484 }
3485
3486 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3487 let new_active_entry = entry.and_then(|project_path| {
3488 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3489 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3490 Some(entry.id)
3491 });
3492 if new_active_entry != self.active_entry {
3493 self.active_entry = new_active_entry;
3494 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3495 }
3496 }
3497
3498 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3499 self.language_servers_with_diagnostics_running > 0
3500 }
3501
3502 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3503 let mut summary = DiagnosticSummary::default();
3504 for (_, path_summary) in self.diagnostic_summaries(cx) {
3505 summary.error_count += path_summary.error_count;
3506 summary.warning_count += path_summary.warning_count;
3507 summary.info_count += path_summary.info_count;
3508 summary.hint_count += path_summary.hint_count;
3509 }
3510 summary
3511 }
3512
3513 pub fn diagnostic_summaries<'a>(
3514 &'a self,
3515 cx: &'a AppContext,
3516 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3517 self.worktrees(cx).flat_map(move |worktree| {
3518 let worktree = worktree.read(cx);
3519 let worktree_id = worktree.id();
3520 worktree
3521 .diagnostic_summaries()
3522 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3523 })
3524 }
3525
3526 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3527 self.language_servers_with_diagnostics_running += 1;
3528 if self.language_servers_with_diagnostics_running == 1 {
3529 cx.emit(Event::DiskBasedDiagnosticsStarted);
3530 }
3531 }
3532
3533 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3534 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3535 self.language_servers_with_diagnostics_running -= 1;
3536 if self.language_servers_with_diagnostics_running == 0 {
3537 cx.emit(Event::DiskBasedDiagnosticsFinished);
3538 }
3539 }
3540
3541 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3542 self.active_entry
3543 }
3544
3545 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3546 self.worktree_for_id(path.worktree_id, cx)?
3547 .read(cx)
3548 .entry_for_path(&path.path)
3549 .map(|entry| entry.id)
3550 }
3551
3552 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3553 let worktree = self.worktree_for_entry(entry_id, cx)?;
3554 let worktree = worktree.read(cx);
3555 let worktree_id = worktree.id();
3556 let path = worktree.entry_for_id(entry_id)?.path.clone();
3557 Some(ProjectPath { worktree_id, path })
3558 }
3559
3560 // RPC message handlers
3561
3562 async fn handle_unshare_project(
3563 this: ModelHandle<Self>,
3564 _: TypedEnvelope<proto::UnshareProject>,
3565 _: Arc<Client>,
3566 mut cx: AsyncAppContext,
3567 ) -> Result<()> {
3568 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3569 Ok(())
3570 }
3571
3572 async fn handle_add_collaborator(
3573 this: ModelHandle<Self>,
3574 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3575 _: Arc<Client>,
3576 mut cx: AsyncAppContext,
3577 ) -> Result<()> {
3578 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3579 let collaborator = envelope
3580 .payload
3581 .collaborator
3582 .take()
3583 .ok_or_else(|| anyhow!("empty collaborator"))?;
3584
3585 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3586 this.update(&mut cx, |this, cx| {
3587 this.collaborators
3588 .insert(collaborator.peer_id, collaborator);
3589 cx.notify();
3590 });
3591
3592 Ok(())
3593 }
3594
3595 async fn handle_remove_collaborator(
3596 this: ModelHandle<Self>,
3597 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3598 _: Arc<Client>,
3599 mut cx: AsyncAppContext,
3600 ) -> Result<()> {
3601 this.update(&mut cx, |this, cx| {
3602 let peer_id = PeerId(envelope.payload.peer_id);
3603 let replica_id = this
3604 .collaborators
3605 .remove(&peer_id)
3606 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3607 .replica_id;
3608 for (_, buffer) in &this.opened_buffers {
3609 if let Some(buffer) = buffer.upgrade(cx) {
3610 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3611 }
3612 }
3613 cx.emit(Event::CollaboratorLeft(peer_id));
3614 cx.notify();
3615 Ok(())
3616 })
3617 }
3618
3619 async fn handle_register_worktree(
3620 this: ModelHandle<Self>,
3621 envelope: TypedEnvelope<proto::RegisterWorktree>,
3622 client: Arc<Client>,
3623 mut cx: AsyncAppContext,
3624 ) -> Result<()> {
3625 this.update(&mut cx, |this, cx| {
3626 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3627 let replica_id = this.replica_id();
3628 let worktree = proto::Worktree {
3629 id: envelope.payload.worktree_id,
3630 root_name: envelope.payload.root_name,
3631 entries: Default::default(),
3632 diagnostic_summaries: Default::default(),
3633 visible: envelope.payload.visible,
3634 };
3635 let (worktree, load_task) =
3636 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3637 this.add_worktree(&worktree, cx);
3638 load_task.detach();
3639 Ok(())
3640 })
3641 }
3642
3643 async fn handle_unregister_worktree(
3644 this: ModelHandle<Self>,
3645 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3646 _: Arc<Client>,
3647 mut cx: AsyncAppContext,
3648 ) -> Result<()> {
3649 this.update(&mut cx, |this, cx| {
3650 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3651 this.remove_worktree(worktree_id, cx);
3652 Ok(())
3653 })
3654 }
3655
3656 async fn handle_update_worktree(
3657 this: ModelHandle<Self>,
3658 envelope: TypedEnvelope<proto::UpdateWorktree>,
3659 _: Arc<Client>,
3660 mut cx: AsyncAppContext,
3661 ) -> Result<()> {
3662 this.update(&mut cx, |this, cx| {
3663 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3664 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3665 worktree.update(cx, |worktree, _| {
3666 let worktree = worktree.as_remote_mut().unwrap();
3667 worktree.update_from_remote(envelope)
3668 })?;
3669 }
3670 Ok(())
3671 })
3672 }
3673
3674 async fn handle_update_diagnostic_summary(
3675 this: ModelHandle<Self>,
3676 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3677 _: Arc<Client>,
3678 mut cx: AsyncAppContext,
3679 ) -> Result<()> {
3680 this.update(&mut cx, |this, cx| {
3681 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3682 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3683 if let Some(summary) = envelope.payload.summary {
3684 let project_path = ProjectPath {
3685 worktree_id,
3686 path: Path::new(&summary.path).into(),
3687 };
3688 worktree.update(cx, |worktree, _| {
3689 worktree
3690 .as_remote_mut()
3691 .unwrap()
3692 .update_diagnostic_summary(project_path.path.clone(), &summary);
3693 });
3694 cx.emit(Event::DiagnosticsUpdated(project_path));
3695 }
3696 }
3697 Ok(())
3698 })
3699 }
3700
3701 async fn handle_start_language_server(
3702 this: ModelHandle<Self>,
3703 envelope: TypedEnvelope<proto::StartLanguageServer>,
3704 _: Arc<Client>,
3705 mut cx: AsyncAppContext,
3706 ) -> Result<()> {
3707 let server = envelope
3708 .payload
3709 .server
3710 .ok_or_else(|| anyhow!("invalid server"))?;
3711 this.update(&mut cx, |this, cx| {
3712 this.language_server_statuses.insert(
3713 server.id as usize,
3714 LanguageServerStatus {
3715 name: server.name,
3716 pending_work: Default::default(),
3717 pending_diagnostic_updates: 0,
3718 },
3719 );
3720 cx.notify();
3721 });
3722 Ok(())
3723 }
3724
3725 async fn handle_update_language_server(
3726 this: ModelHandle<Self>,
3727 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3728 _: Arc<Client>,
3729 mut cx: AsyncAppContext,
3730 ) -> Result<()> {
3731 let language_server_id = envelope.payload.language_server_id as usize;
3732 match envelope
3733 .payload
3734 .variant
3735 .ok_or_else(|| anyhow!("invalid variant"))?
3736 {
3737 proto::update_language_server::Variant::WorkStart(payload) => {
3738 this.update(&mut cx, |this, cx| {
3739 this.on_lsp_work_start(language_server_id, payload.token, cx);
3740 })
3741 }
3742 proto::update_language_server::Variant::WorkProgress(payload) => {
3743 this.update(&mut cx, |this, cx| {
3744 this.on_lsp_work_progress(
3745 language_server_id,
3746 payload.token,
3747 LanguageServerProgress {
3748 message: payload.message,
3749 percentage: payload.percentage.map(|p| p as usize),
3750 last_update_at: Instant::now(),
3751 },
3752 cx,
3753 );
3754 })
3755 }
3756 proto::update_language_server::Variant::WorkEnd(payload) => {
3757 this.update(&mut cx, |this, cx| {
3758 this.on_lsp_work_end(language_server_id, payload.token, cx);
3759 })
3760 }
3761 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3762 this.update(&mut cx, |this, cx| {
3763 this.disk_based_diagnostics_started(cx);
3764 })
3765 }
3766 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3767 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3768 }
3769 }
3770
3771 Ok(())
3772 }
3773
3774 async fn handle_update_buffer(
3775 this: ModelHandle<Self>,
3776 envelope: TypedEnvelope<proto::UpdateBuffer>,
3777 _: Arc<Client>,
3778 mut cx: AsyncAppContext,
3779 ) -> Result<()> {
3780 this.update(&mut cx, |this, cx| {
3781 let payload = envelope.payload.clone();
3782 let buffer_id = payload.buffer_id;
3783 let ops = payload
3784 .operations
3785 .into_iter()
3786 .map(|op| language::proto::deserialize_operation(op))
3787 .collect::<Result<Vec<_>, _>>()?;
3788 match this.opened_buffers.entry(buffer_id) {
3789 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3790 OpenBuffer::Strong(buffer) => {
3791 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3792 }
3793 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3794 OpenBuffer::Weak(_) => {}
3795 },
3796 hash_map::Entry::Vacant(e) => {
3797 e.insert(OpenBuffer::Loading(ops));
3798 }
3799 }
3800 Ok(())
3801 })
3802 }
3803
3804 async fn handle_update_buffer_file(
3805 this: ModelHandle<Self>,
3806 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3807 _: Arc<Client>,
3808 mut cx: AsyncAppContext,
3809 ) -> Result<()> {
3810 this.update(&mut cx, |this, cx| {
3811 let payload = envelope.payload.clone();
3812 let buffer_id = payload.buffer_id;
3813 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3814 let worktree = this
3815 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3816 .ok_or_else(|| anyhow!("no such worktree"))?;
3817 let file = File::from_proto(file, worktree.clone(), cx)?;
3818 let buffer = this
3819 .opened_buffers
3820 .get_mut(&buffer_id)
3821 .and_then(|b| b.upgrade(cx))
3822 .ok_or_else(|| anyhow!("no such buffer"))?;
3823 buffer.update(cx, |buffer, cx| {
3824 buffer.file_updated(Box::new(file), cx).detach();
3825 });
3826 Ok(())
3827 })
3828 }
3829
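    /// Saves a buffer on behalf of a peer, first waiting until this replica has
    /// seen the version that the peer asked to save.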
3830 async fn handle_save_buffer(
3831 this: ModelHandle<Self>,
3832 envelope: TypedEnvelope<proto::SaveBuffer>,
3833 _: Arc<Client>,
3834 mut cx: AsyncAppContext,
3835 ) -> Result<proto::BufferSaved> {
3836 let buffer_id = envelope.payload.buffer_id;
3837 let requested_version = deserialize_version(envelope.payload.version);
3838
3839 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3840 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3841 let buffer = this
3842 .opened_buffers
3843 .get(&buffer_id)
3844 .map(|buffer| buffer.upgrade(cx).unwrap())
3845 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3846 Ok::<_, anyhow::Error>((project_id, buffer))
3847 })?;
3848 buffer
3849 .update(&mut cx, |buffer, _| {
3850 buffer.wait_for_version(requested_version)
3851 })
3852 .await;
3853
3854 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3855 Ok(proto::BufferSaved {
3856 project_id,
3857 buffer_id,
3858 version: serialize_version(&saved_version),
3859 mtime: Some(mtime.into()),
3860 })
3861 }
3862
3863 async fn handle_reload_buffers(
3864 this: ModelHandle<Self>,
3865 envelope: TypedEnvelope<proto::ReloadBuffers>,
3866 _: Arc<Client>,
3867 mut cx: AsyncAppContext,
3868 ) -> Result<proto::ReloadBuffersResponse> {
3869 let sender_id = envelope.original_sender_id()?;
3870 let reload = this.update(&mut cx, |this, cx| {
3871 let mut buffers = HashSet::default();
3872 for buffer_id in &envelope.payload.buffer_ids {
3873 buffers.insert(
3874 this.opened_buffers
3875 .get(buffer_id)
3876 .map(|buffer| buffer.upgrade(cx).unwrap())
3877 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3878 );
3879 }
3880 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3881 })?;
3882
3883 let project_transaction = reload.await?;
3884 let project_transaction = this.update(&mut cx, |this, cx| {
3885 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3886 });
3887 Ok(proto::ReloadBuffersResponse {
3888 transaction: Some(project_transaction),
3889 })
3890 }
3891
3892 async fn handle_format_buffers(
3893 this: ModelHandle<Self>,
3894 envelope: TypedEnvelope<proto::FormatBuffers>,
3895 _: Arc<Client>,
3896 mut cx: AsyncAppContext,
3897 ) -> Result<proto::FormatBuffersResponse> {
3898 let sender_id = envelope.original_sender_id()?;
3899 let format = this.update(&mut cx, |this, cx| {
3900 let mut buffers = HashSet::default();
3901 for buffer_id in &envelope.payload.buffer_ids {
3902 buffers.insert(
3903 this.opened_buffers
3904 .get(buffer_id)
3905 .map(|buffer| buffer.upgrade(cx).unwrap())
3906 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3907 );
3908 }
3909 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3910 })?;
3911
3912 let project_transaction = format.await?;
3913 let project_transaction = this.update(&mut cx, |this, cx| {
3914 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3915 });
3916 Ok(proto::FormatBuffersResponse {
3917 transaction: Some(project_transaction),
3918 })
3919 }
3920
3921 async fn handle_get_completions(
3922 this: ModelHandle<Self>,
3923 envelope: TypedEnvelope<proto::GetCompletions>,
3924 _: Arc<Client>,
3925 mut cx: AsyncAppContext,
3926 ) -> Result<proto::GetCompletionsResponse> {
3927 let position = envelope
3928 .payload
3929 .position
3930 .and_then(language::proto::deserialize_anchor)
3931 .ok_or_else(|| anyhow!("invalid position"))?;
3932 let version = deserialize_version(envelope.payload.version);
3933 let buffer = this.read_with(&cx, |this, cx| {
3934 this.opened_buffers
3935 .get(&envelope.payload.buffer_id)
3936 .map(|buffer| buffer.upgrade(cx).unwrap())
3937 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3938 })?;
3939 buffer
3940 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3941 .await;
3942 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3943 let completions = this
3944 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3945 .await?;
3946
3947 Ok(proto::GetCompletionsResponse {
3948 completions: completions
3949 .iter()
3950 .map(language::proto::serialize_completion)
3951 .collect(),
3952 version: serialize_version(&version),
3953 })
3954 }
3955
3956 async fn handle_apply_additional_edits_for_completion(
3957 this: ModelHandle<Self>,
3958 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3959 _: Arc<Client>,
3960 mut cx: AsyncAppContext,
3961 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3962 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3963 let buffer = this
3964 .opened_buffers
3965 .get(&envelope.payload.buffer_id)
3966 .map(|buffer| buffer.upgrade(cx).unwrap())
3967 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3968 let language = buffer.read(cx).language();
3969 let completion = language::proto::deserialize_completion(
3970 envelope
3971 .payload
3972 .completion
3973 .ok_or_else(|| anyhow!("invalid completion"))?,
3974 language,
3975 )?;
3976 Ok::<_, anyhow::Error>(
3977 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3978 )
3979 })?;
3980
3981 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3982 transaction: apply_additional_edits
3983 .await?
3984 .as_ref()
3985 .map(language::proto::serialize_transaction),
3986 })
3987 }
3988
3989 async fn handle_get_code_actions(
3990 this: ModelHandle<Self>,
3991 envelope: TypedEnvelope<proto::GetCodeActions>,
3992 _: Arc<Client>,
3993 mut cx: AsyncAppContext,
3994 ) -> Result<proto::GetCodeActionsResponse> {
3995 let start = envelope
3996 .payload
3997 .start
3998 .and_then(language::proto::deserialize_anchor)
3999 .ok_or_else(|| anyhow!("invalid start"))?;
4000 let end = envelope
4001 .payload
4002 .end
4003 .and_then(language::proto::deserialize_anchor)
4004 .ok_or_else(|| anyhow!("invalid end"))?;
4005 let buffer = this.update(&mut cx, |this, cx| {
4006 this.opened_buffers
4007 .get(&envelope.payload.buffer_id)
4008 .map(|buffer| buffer.upgrade(cx).unwrap())
4009 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4010 })?;
4011 buffer
4012 .update(&mut cx, |buffer, _| {
4013 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4014 })
4015 .await;
4016
4017 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4018 let code_actions = this.update(&mut cx, |this, cx| {
4019 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4020 })?;
4021
4022 Ok(proto::GetCodeActionsResponse {
4023 actions: code_actions
4024 .await?
4025 .iter()
4026 .map(language::proto::serialize_code_action)
4027 .collect(),
4028 version: serialize_version(&version),
4029 })
4030 }
4031
4032 async fn handle_apply_code_action(
4033 this: ModelHandle<Self>,
4034 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4035 _: Arc<Client>,
4036 mut cx: AsyncAppContext,
4037 ) -> Result<proto::ApplyCodeActionResponse> {
4038 let sender_id = envelope.original_sender_id()?;
4039 let action = language::proto::deserialize_code_action(
4040 envelope
4041 .payload
4042 .action
4043 .ok_or_else(|| anyhow!("invalid action"))?,
4044 )?;
4045 let apply_code_action = this.update(&mut cx, |this, cx| {
4046 let buffer = this
4047 .opened_buffers
4048 .get(&envelope.payload.buffer_id)
4049 .map(|buffer| buffer.upgrade(cx).unwrap())
4050 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4051 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4052 })?;
4053
4054 let project_transaction = apply_code_action.await?;
4055 let project_transaction = this.update(&mut cx, |this, cx| {
4056 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4057 });
4058 Ok(proto::ApplyCodeActionResponse {
4059 transaction: Some(project_transaction),
4060 })
4061 }
4062
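    /// Generic handler for buffer-scoped LSP requests: deserializes the typed
    /// request from the envelope, runs it against the local language server, and
    /// serializes the response for the requesting peer.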
4063 async fn handle_lsp_command<T: LspCommand>(
4064 this: ModelHandle<Self>,
4065 envelope: TypedEnvelope<T::ProtoRequest>,
4066 _: Arc<Client>,
4067 mut cx: AsyncAppContext,
4068 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4069 where
4070 <T::LspRequest as lsp::request::Request>::Result: Send,
4071 {
4072 let sender_id = envelope.original_sender_id()?;
4073 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4074 let buffer_handle = this.read_with(&cx, |this, _| {
4075 this.opened_buffers
4076 .get(&buffer_id)
4077 .and_then(|buffer| buffer.upgrade(&cx))
4078 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4079 })?;
4080 let request = T::from_proto(
4081 envelope.payload,
4082 this.clone(),
4083 buffer_handle.clone(),
4084 cx.clone(),
4085 )
4086 .await?;
4087 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4088 let response = this
4089 .update(&mut cx, |this, cx| {
4090 this.request_lsp(buffer_handle, request, cx)
4091 })
4092 .await?;
4093 this.update(&mut cx, |this, cx| {
4094 Ok(T::response_to_proto(
4095 response,
4096 this,
4097 sender_id,
4098 &buffer_version,
4099 cx,
4100 ))
4101 })
4102 }
4103
4104 async fn handle_get_project_symbols(
4105 this: ModelHandle<Self>,
4106 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4107 _: Arc<Client>,
4108 mut cx: AsyncAppContext,
4109 ) -> Result<proto::GetProjectSymbolsResponse> {
4110 let symbols = this
4111 .update(&mut cx, |this, cx| {
4112 this.symbols(&envelope.payload.query, cx)
4113 })
4114 .await?;
4115
4116 Ok(proto::GetProjectSymbolsResponse {
4117 symbols: symbols.iter().map(serialize_symbol).collect(),
4118 })
4119 }
4120
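    /// Performs a project-wide search on behalf of a peer and returns the matching
    /// ranges, serializing each buffer that contains a match for that peer.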
4121 async fn handle_search_project(
4122 this: ModelHandle<Self>,
4123 envelope: TypedEnvelope<proto::SearchProject>,
4124 _: Arc<Client>,
4125 mut cx: AsyncAppContext,
4126 ) -> Result<proto::SearchProjectResponse> {
4127 let peer_id = envelope.original_sender_id()?;
4128 let query = SearchQuery::from_proto(envelope.payload)?;
4129 let result = this
4130 .update(&mut cx, |this, cx| this.search(query, cx))
4131 .await?;
4132
4133 this.update(&mut cx, |this, cx| {
4134 let mut locations = Vec::new();
4135 for (buffer, ranges) in result {
4136 for range in ranges {
4137 let start = serialize_anchor(&range.start);
4138 let end = serialize_anchor(&range.end);
4139 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4140 locations.push(proto::Location {
4141 buffer: Some(buffer),
4142 start: Some(start),
4143 end: Some(end),
4144 });
4145 }
4146 }
4147 Ok(proto::SearchProjectResponse { locations })
4148 })
4149 }
4150
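    /// Opens the buffer containing a symbol requested by a peer, after verifying
    /// the symbol's signature to ensure it was produced by this project.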
4151 async fn handle_open_buffer_for_symbol(
4152 this: ModelHandle<Self>,
4153 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4154 _: Arc<Client>,
4155 mut cx: AsyncAppContext,
4156 ) -> Result<proto::OpenBufferForSymbolResponse> {
4157 let peer_id = envelope.original_sender_id()?;
4158 let symbol = envelope
4159 .payload
4160 .symbol
4161 .ok_or_else(|| anyhow!("invalid symbol"))?;
4162 let symbol = this.read_with(&cx, |this, _| {
4163 let symbol = this.deserialize_symbol(symbol)?;
4164 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4165 if signature == symbol.signature {
4166 Ok(symbol)
4167 } else {
4168 Err(anyhow!("invalid symbol signature"))
4169 }
4170 })?;
4171 let buffer = this
4172 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4173 .await?;
4174
4175 Ok(proto::OpenBufferForSymbolResponse {
4176 buffer: Some(this.update(&mut cx, |this, cx| {
4177 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4178 })),
4179 })
4180 }
4181
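    /// Hashes a worktree id and path together with this project's private nonce.
    /// The digest is attached to symbols sent to peers and re-checked when a peer
    /// asks to open one of them.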
4182 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4183 let mut hasher = Sha256::new();
4184 hasher.update(worktree_id.to_proto().to_be_bytes());
4185 hasher.update(path.to_string_lossy().as_bytes());
4186 hasher.update(self.nonce.to_be_bytes());
4187 hasher.finalize().as_slice().try_into().unwrap()
4188 }
4189
4190 async fn handle_open_buffer_by_id(
4191 this: ModelHandle<Self>,
4192 envelope: TypedEnvelope<proto::OpenBufferById>,
4193 _: Arc<Client>,
4194 mut cx: AsyncAppContext,
4195 ) -> Result<proto::OpenBufferResponse> {
4196 let peer_id = envelope.original_sender_id()?;
4197 let buffer = this
4198 .update(&mut cx, |this, cx| {
4199 this.open_buffer_by_id(envelope.payload.id, cx)
4200 })
4201 .await?;
4202 this.update(&mut cx, |this, cx| {
4203 Ok(proto::OpenBufferResponse {
4204 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4205 })
4206 })
4207 }
4208
4209 async fn handle_open_buffer_by_path(
4210 this: ModelHandle<Self>,
4211 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4212 _: Arc<Client>,
4213 mut cx: AsyncAppContext,
4214 ) -> Result<proto::OpenBufferResponse> {
4215 let peer_id = envelope.original_sender_id()?;
4216 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4217 let open_buffer = this.update(&mut cx, |this, cx| {
4218 this.open_buffer(
4219 ProjectPath {
4220 worktree_id,
4221 path: PathBuf::from(envelope.payload.path).into(),
4222 },
4223 cx,
4224 )
4225 });
4226
4227 let buffer = open_buffer.await?;
4228 this.update(&mut cx, |this, cx| {
4229 Ok(proto::OpenBufferResponse {
4230 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4231 })
4232 })
4233 }
4234
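    /// Serializes a multi-buffer transaction for a peer, including the full state
    /// of any buffer that peer hasn't been sent yet.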
4235 fn serialize_project_transaction_for_peer(
4236 &mut self,
4237 project_transaction: ProjectTransaction,
4238 peer_id: PeerId,
4239 cx: &AppContext,
4240 ) -> proto::ProjectTransaction {
4241 let mut serialized_transaction = proto::ProjectTransaction {
4242 buffers: Default::default(),
4243 transactions: Default::default(),
4244 };
4245 for (buffer, transaction) in project_transaction.0 {
4246 serialized_transaction
4247 .buffers
4248 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4249 serialized_transaction
4250 .transactions
4251 .push(language::proto::serialize_transaction(&transaction));
4252 }
4253 serialized_transaction
4254 }
4255
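    /// Reconstructs a project transaction received from a peer, waiting for each
    /// transaction's edits to arrive and optionally pushing them onto the
    /// corresponding buffers' undo histories.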
4256 fn deserialize_project_transaction(
4257 &mut self,
4258 message: proto::ProjectTransaction,
4259 push_to_history: bool,
4260 cx: &mut ModelContext<Self>,
4261 ) -> Task<Result<ProjectTransaction>> {
4262 cx.spawn(|this, mut cx| async move {
4263 let mut project_transaction = ProjectTransaction::default();
4264 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4265 let buffer = this
4266 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4267 .await?;
4268 let transaction = language::proto::deserialize_transaction(transaction)?;
4269 project_transaction.0.insert(buffer, transaction);
4270 }
4271
4272 for (buffer, transaction) in &project_transaction.0 {
4273 buffer
4274 .update(&mut cx, |buffer, _| {
4275 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4276 })
4277 .await;
4278
4279 if push_to_history {
4280 buffer.update(&mut cx, |buffer, _| {
4281 buffer.push_transaction(transaction.clone(), Instant::now());
4282 });
4283 }
4284 }
4285
4286 Ok(project_transaction)
4287 })
4288 }
4289
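    /// The first time a buffer is sent to a given peer, its full state is
    /// included; afterwards it is referred to by id only.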
4290 fn serialize_buffer_for_peer(
4291 &mut self,
4292 buffer: &ModelHandle<Buffer>,
4293 peer_id: PeerId,
4294 cx: &AppContext,
4295 ) -> proto::Buffer {
4296 let buffer_id = buffer.read(cx).remote_id();
4297 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4298 if shared_buffers.insert(buffer_id) {
4299 proto::Buffer {
4300 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4301 }
4302 } else {
4303 proto::Buffer {
4304 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4305 }
4306 }
4307 }
4308
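    /// Resolves a `proto::Buffer` into a buffer model. An `Id` variant waits until
    /// the buffer's state arrives separately, while a `State` variant constructs
    /// and registers a new buffer.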
4309 fn deserialize_buffer(
4310 &mut self,
4311 buffer: proto::Buffer,
4312 cx: &mut ModelContext<Self>,
4313 ) -> Task<Result<ModelHandle<Buffer>>> {
4314 let replica_id = self.replica_id();
4315
4316 let opened_buffer_tx = self.opened_buffer.0.clone();
4317 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4318 cx.spawn(|this, mut cx| async move {
4319 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4320 proto::buffer::Variant::Id(id) => {
4321 let buffer = loop {
4322 let buffer = this.read_with(&cx, |this, cx| {
4323 this.opened_buffers
4324 .get(&id)
4325 .and_then(|buffer| buffer.upgrade(cx))
4326 });
4327 if let Some(buffer) = buffer {
4328 break buffer;
4329 }
4330 opened_buffer_rx
4331 .next()
4332 .await
4333 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4334 };
4335 Ok(buffer)
4336 }
4337 proto::buffer::Variant::State(mut buffer) => {
4338 let mut buffer_worktree = None;
4339 let mut buffer_file = None;
4340 if let Some(file) = buffer.file.take() {
4341 this.read_with(&cx, |this, cx| {
4342 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4343 let worktree =
4344 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4345 anyhow!("no worktree found for id {}", file.worktree_id)
4346 })?;
4347 buffer_file =
4348 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4349 as Box<dyn language::File>);
4350 buffer_worktree = Some(worktree);
4351 Ok::<_, anyhow::Error>(())
4352 })?;
4353 }
4354
4355 let buffer = cx.add_model(|cx| {
4356 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4357 });
4358
4359 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4360
4361 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4362 Ok(buffer)
4363 }
4364 }
4365 })
4366 }
4367
4368 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4369 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4370 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4371 let start = serialized_symbol
4372 .start
4373 .ok_or_else(|| anyhow!("invalid start"))?;
4374 let end = serialized_symbol
4375 .end
4376 .ok_or_else(|| anyhow!("invalid end"))?;
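        // The raw protobuf value is transmuted into an `lsp::SymbolKind`; this
        // relies on the peer having sent a valid kind.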
4377 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4378 let path = PathBuf::from(serialized_symbol.path);
4379 let language = self.languages.select_language(&path);
4380 Ok(Symbol {
4381 source_worktree_id,
4382 worktree_id,
4383 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4384 label: language
4385 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4386 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4387 name: serialized_symbol.name,
4388 path,
4389 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4390 kind,
4391 signature: serialized_symbol
4392 .signature
4393 .try_into()
4394 .map_err(|_| anyhow!("invalid signature"))?,
4395 })
4396 }
4397
4398 async fn handle_buffer_saved(
4399 this: ModelHandle<Self>,
4400 envelope: TypedEnvelope<proto::BufferSaved>,
4401 _: Arc<Client>,
4402 mut cx: AsyncAppContext,
4403 ) -> Result<()> {
4404 let version = deserialize_version(envelope.payload.version);
4405 let mtime = envelope
4406 .payload
4407 .mtime
4408 .ok_or_else(|| anyhow!("missing mtime"))?
4409 .into();
4410
4411 this.update(&mut cx, |this, cx| {
4412 let buffer = this
4413 .opened_buffers
4414 .get(&envelope.payload.buffer_id)
4415 .and_then(|buffer| buffer.upgrade(cx));
4416 if let Some(buffer) = buffer {
4417 buffer.update(cx, |buffer, cx| {
4418 buffer.did_save(version, mtime, None, cx);
4419 });
4420 }
4421 Ok(())
4422 })
4423 }
4424
4425 async fn handle_buffer_reloaded(
4426 this: ModelHandle<Self>,
4427 envelope: TypedEnvelope<proto::BufferReloaded>,
4428 _: Arc<Client>,
4429 mut cx: AsyncAppContext,
4430 ) -> Result<()> {
4431 let payload = envelope.payload.clone();
4432 let version = deserialize_version(payload.version);
4433 let mtime = payload
4434 .mtime
4435 .ok_or_else(|| anyhow!("missing mtime"))?
4436 .into();
4437 this.update(&mut cx, |this, cx| {
4438 let buffer = this
4439 .opened_buffers
4440 .get(&payload.buffer_id)
4441 .and_then(|buffer| buffer.upgrade(cx));
4442 if let Some(buffer) = buffer {
4443 buffer.update(cx, |buffer, cx| {
4444 buffer.did_reload(version, mtime, cx);
4445 });
4446 }
4447 Ok(())
4448 })
4449 }
4450
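    /// Fuzzy-matches `query` against the paths of all visible worktrees, running
    /// the matching on the background executor.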
4451 pub fn match_paths<'a>(
4452 &self,
4453 query: &'a str,
4454 include_ignored: bool,
4455 smart_case: bool,
4456 max_results: usize,
4457 cancel_flag: &'a AtomicBool,
4458 cx: &AppContext,
4459 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4460 let worktrees = self
4461 .worktrees(cx)
4462 .filter(|worktree| worktree.read(cx).is_visible())
4463 .collect::<Vec<_>>();
4464 let include_root_name = worktrees.len() > 1;
4465 let candidate_sets = worktrees
4466 .into_iter()
4467 .map(|worktree| CandidateSet {
4468 snapshot: worktree.read(cx).snapshot(),
4469 include_ignored,
4470 include_root_name,
4471 })
4472 .collect::<Vec<_>>();
4473
4474 let background = cx.background().clone();
4475 async move {
4476 fuzzy::match_paths(
4477 candidate_sets.as_slice(),
4478 query,
4479 smart_case,
4480 max_results,
4481 cancel_flag,
4482 background,
4483 )
4484 .await
4485 }
4486 }
4487
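    /// Converts LSP text edits into anchored edits against the snapshot that
    /// corresponds to `version`, coalescing edits that are adjacent or separated
    /// only by a newline, and diffing multi-line replacements so that anchors in
    /// unchanged regions are preserved.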
4488 fn edits_from_lsp(
4489 &mut self,
4490 buffer: &ModelHandle<Buffer>,
4491 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4492 version: Option<i32>,
4493 cx: &mut ModelContext<Self>,
4494 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4495 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4496 cx.background().spawn(async move {
4497 let snapshot = snapshot?;
4498 let mut lsp_edits = lsp_edits
4499 .into_iter()
4500 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4501 .peekable();
4502
4503 let mut edits = Vec::new();
4504 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4505 // Combine any LSP edits that are adjacent.
4506 //
4507 // Also, combine LSP edits that are separated from each other by only
4508 // a newline. This is important because for some code actions,
4509 // Rust-analyzer rewrites the entire buffer via a series of edits that
4510 // are separated by unchanged newline characters.
4511 //
4512 // In order for the diffing logic below to work properly, any edits that
4513 // cancel each other out must be combined into one.
4514 while let Some((next_range, next_text)) = lsp_edits.peek() {
4515 if next_range.start > range.end {
4516 if next_range.start.row > range.end.row + 1
4517 || next_range.start.column > 0
4518 || snapshot.clip_point_utf16(
4519 PointUtf16::new(range.end.row, u32::MAX),
4520 Bias::Left,
4521 ) > range.end
4522 {
4523 break;
4524 }
4525 new_text.push('\n');
4526 }
4527 range.end = next_range.end;
4528 new_text.push_str(&next_text);
4529 lsp_edits.next();
4530 }
4531
4532 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4533 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4534 {
4535 return Err(anyhow!("invalid edits received from language server"));
4536 }
4537
4538 // For multiline edits, perform a diff of the old and new text so that
4539 // we can identify the changes more precisely, preserving the locations
4540 // of any anchors positioned in the unchanged regions.
4541 if range.end.row > range.start.row {
4542 let mut offset = range.start.to_offset(&snapshot);
4543 let old_text = snapshot.text_for_range(range).collect::<String>();
4544
4545 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4546 let mut moved_since_edit = true;
4547 for change in diff.iter_all_changes() {
4548 let tag = change.tag();
4549 let value = change.value();
4550 match tag {
4551 ChangeTag::Equal => {
4552 offset += value.len();
4553 moved_since_edit = true;
4554 }
4555 ChangeTag::Delete => {
4556 let start = snapshot.anchor_after(offset);
4557 let end = snapshot.anchor_before(offset + value.len());
4558 if moved_since_edit {
4559 edits.push((start..end, String::new()));
4560 } else {
4561 edits.last_mut().unwrap().0.end = end;
4562 }
4563 offset += value.len();
4564 moved_since_edit = false;
4565 }
4566 ChangeTag::Insert => {
4567 if moved_since_edit {
4568 let anchor = snapshot.anchor_after(offset);
4569 edits.push((anchor.clone()..anchor, value.to_string()));
4570 } else {
4571 edits.last_mut().unwrap().1.push_str(value);
4572 }
4573 moved_since_edit = false;
4574 }
4575 }
4576 }
4577 } else if range.end == range.start {
4578 let anchor = snapshot.anchor_after(range.start);
4579 edits.push((anchor.clone()..anchor, new_text));
4580 } else {
4581 let edit_start = snapshot.anchor_after(range.start);
4582 let edit_end = snapshot.anchor_before(range.end);
4583 edits.push((edit_start..edit_end, new_text));
4584 }
4585 }
4586
4587 Ok(edits)
4588 })
4589 }
4590
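    /// Returns the buffer snapshot corresponding to an LSP document version,
    /// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
    /// the requested one. When no version is given, the buffer's current snapshot
    /// is returned.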
4591 fn buffer_snapshot_for_lsp_version(
4592 &mut self,
4593 buffer: &ModelHandle<Buffer>,
4594 version: Option<i32>,
4595 cx: &AppContext,
4596 ) -> Result<TextBufferSnapshot> {
4597 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4598
4599 if let Some(version) = version {
4600 let buffer_id = buffer.read(cx).remote_id();
4601 let snapshots = self
4602 .buffer_snapshots
4603 .get_mut(&buffer_id)
4604 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4605 let mut found_snapshot = None;
4606 snapshots.retain(|(snapshot_version, snapshot)| {
4607 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4608 false
4609 } else {
4610 if *snapshot_version == version {
4611 found_snapshot = Some(snapshot.clone());
4612 }
4613 true
4614 }
4615 });
4616
4617 found_snapshot.ok_or_else(|| {
4618 anyhow!(
4619 "snapshot not found for buffer {} at version {}",
4620 buffer_id,
4621 version
4622 )
4623 })
4624 } else {
            Ok(buffer.read(cx).text_snapshot())
4626 }
4627 }
4628
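    /// Returns the adapter and language server that were started for this buffer's
    /// worktree and language, if any.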
4629 fn language_server_for_buffer(
4630 &self,
4631 buffer: &Buffer,
4632 cx: &AppContext,
4633 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4634 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4635 let worktree_id = file.worktree_id(cx);
4636 self.language_servers
4637 .get(&(worktree_id, language.lsp_adapter()?.name()))
4638 } else {
4639 None
4640 }
4641 }
4642}
4643
4644impl WorktreeHandle {
4645 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4646 match self {
4647 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4648 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4649 }
4650 }
4651}
4652
4653impl OpenBuffer {
4654 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4655 match self {
4656 OpenBuffer::Strong(handle) => Some(handle.clone()),
4657 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4658 OpenBuffer::Loading(_) => None,
4659 }
4660 }
4661}
4662
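/// Adapts a worktree snapshot to the fuzzy matcher's candidate-set interface.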
4663struct CandidateSet {
4664 snapshot: Snapshot,
4665 include_ignored: bool,
4666 include_root_name: bool,
4667}
4668
4669impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4670 type Candidates = CandidateSetIter<'a>;
4671
4672 fn id(&self) -> usize {
4673 self.snapshot.id().to_usize()
4674 }
4675
4676 fn len(&self) -> usize {
4677 if self.include_ignored {
4678 self.snapshot.file_count()
4679 } else {
4680 self.snapshot.visible_file_count()
4681 }
4682 }
4683
4684 fn prefix(&self) -> Arc<str> {
4685 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4686 self.snapshot.root_name().into()
4687 } else if self.include_root_name {
4688 format!("{}/", self.snapshot.root_name()).into()
4689 } else {
4690 "".into()
4691 }
4692 }
4693
4694 fn candidates(&'a self, start: usize) -> Self::Candidates {
4695 CandidateSetIter {
4696 traversal: self.snapshot.files(self.include_ignored, start),
4697 }
4698 }
4699}
4700
4701struct CandidateSetIter<'a> {
4702 traversal: Traversal<'a>,
4703}
4704
4705impl<'a> Iterator for CandidateSetIter<'a> {
4706 type Item = PathMatchCandidate<'a>;
4707
4708 fn next(&mut self) -> Option<Self::Item> {
4709 self.traversal.next().map(|entry| {
4710 if let EntryKind::File(char_bag) = entry.kind {
4711 PathMatchCandidate {
4712 path: &entry.path,
4713 char_bag,
4714 }
4715 } else {
4716 unreachable!()
4717 }
4718 })
4719 }
4720}
4721
4722impl Entity for Project {
4723 type Event = Event;
4724
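    // Unregister a local project from the server when it's dropped, or leave a
    // remote project that this client had joined.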
4725 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4726 match &self.client_state {
4727 ProjectClientState::Local { remote_id_rx, .. } => {
4728 if let Some(project_id) = *remote_id_rx.borrow() {
4729 self.client
4730 .send(proto::UnregisterProject { project_id })
4731 .log_err();
4732 }
4733 }
4734 ProjectClientState::Remote { remote_id, .. } => {
4735 self.client
4736 .send(proto::LeaveProject {
4737 project_id: *remote_id,
4738 })
4739 .log_err();
4740 }
4741 }
4742 }
4743
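    // Shut down all running language servers before the application exits.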
4744 fn app_will_quit(
4745 &mut self,
4746 _: &mut MutableAppContext,
4747 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4748 let shutdown_futures = self
4749 .language_servers
4750 .drain()
4751 .filter_map(|(_, (_, server))| server.shutdown())
4752 .collect::<Vec<_>>();
4753 Some(
4754 async move {
4755 futures::future::join_all(shutdown_futures).await;
4756 }
4757 .boxed(),
4758 )
4759 }
4760}
4761
4762impl Collaborator {
4763 fn from_proto(
4764 message: proto::Collaborator,
4765 user_store: &ModelHandle<UserStore>,
4766 cx: &mut AsyncAppContext,
4767 ) -> impl Future<Output = Result<Self>> {
4768 let user = user_store.update(cx, |user_store, cx| {
4769 user_store.fetch_user(message.user_id, cx)
4770 });
4771
4772 async move {
4773 Ok(Self {
4774 peer_id: PeerId(message.peer_id),
4775 user: user.await?,
4776 replica_id: message.replica_id as ReplicaId,
4777 })
4778 }
4779 }
4780}
4781
4782impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4783 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4784 Self {
4785 worktree_id,
4786 path: path.as_ref().into(),
4787 }
4788 }
4789}
4790
4791impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4792 fn from(options: lsp::CreateFileOptions) -> Self {
4793 Self {
4794 overwrite: options.overwrite.unwrap_or(false),
4795 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4796 }
4797 }
4798}
4799
4800impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4801 fn from(options: lsp::RenameFileOptions) -> Self {
4802 Self {
4803 overwrite: options.overwrite.unwrap_or(false),
4804 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4805 }
4806 }
4807}
4808
4809impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4810 fn from(options: lsp::DeleteFileOptions) -> Self {
4811 Self {
4812 recursive: options.recursive.unwrap_or(false),
4813 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4814 }
4815 }
4816}
4817
4818fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4819 proto::Symbol {
4820 source_worktree_id: symbol.source_worktree_id.to_proto(),
4821 worktree_id: symbol.worktree_id.to_proto(),
4822 language_server_name: symbol.language_server_name.0.to_string(),
4823 name: symbol.name.clone(),
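        // The `lsp::SymbolKind` is transmitted as its raw enum value.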
4824 kind: unsafe { mem::transmute(symbol.kind) },
4825 path: symbol.path.to_string_lossy().to_string(),
4826 start: Some(proto::Point {
4827 row: symbol.range.start.row,
4828 column: symbol.range.start.column,
4829 }),
4830 end: Some(proto::Point {
4831 row: symbol.range.end.row,
4832 column: symbol.range.end.column,
4833 }),
4834 signature: symbol.signature.to_vec(),
4835 }
4836}
4837
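/// Computes a path equivalent to `path`, expressed relative to `base`, inserting
/// `..` components wherever `path` is not a descendant of `base`.
///
/// Illustrative example:
///
/// ```ignore
/// assert_eq!(
///     relativize_path(Path::new("/a/d"), Path::new("/a/b/c")),
///     PathBuf::from("../b/c"),
/// );
/// ```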
4838fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4839 let mut path_components = path.components();
4840 let mut base_components = base.components();
4841 let mut components: Vec<Component> = Vec::new();
4842 loop {
4843 match (path_components.next(), base_components.next()) {
4844 (None, None) => break,
4845 (Some(a), None) => {
4846 components.push(a);
4847 components.extend(path_components.by_ref());
4848 break;
4849 }
4850 (None, _) => components.push(Component::ParentDir),
4851 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4852 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4853 (Some(a), Some(_)) => {
4854 components.push(Component::ParentDir);
4855 for _ in base_components {
4856 components.push(Component::ParentDir);
4857 }
4858 components.push(a);
4859 components.extend(path_components.by_ref());
4860 break;
4861 }
4862 }
4863 }
4864 components.iter().map(|c| c.as_os_str()).collect()
4865}
4866
4867impl Item for Buffer {
4868 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4869 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4870 }
4871}
4872
4873#[cfg(test)]
4874mod tests {
4875 use super::{Event, *};
4876 use fs::RealFs;
4877 use futures::{future, StreamExt};
4878 use gpui::test::subscribe;
4879 use language::{
4880 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4881 ToPoint,
4882 };
4883 use lsp::Url;
4884 use serde_json::json;
4885 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4886 use unindent::Unindent as _;
4887 use util::{assert_set_eq, test::temp_tree};
4888 use worktree::WorktreeHandle as _;
4889
4890 #[gpui::test]
4891 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4892 let dir = temp_tree(json!({
4893 "root": {
4894 "apple": "",
4895 "banana": {
4896 "carrot": {
4897 "date": "",
4898 "endive": "",
4899 }
4900 },
4901 "fennel": {
4902 "grape": "",
4903 }
4904 }
4905 }));
4906
4907 let root_link_path = dir.path().join("root_link");
4908 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4909 unix::fs::symlink(
4910 &dir.path().join("root/fennel"),
4911 &dir.path().join("root/finnochio"),
4912 )
4913 .unwrap();
4914
4915 let project = Project::test(Arc::new(RealFs), cx);
4916
4917 let (tree, _) = project
4918 .update(cx, |project, cx| {
4919 project.find_or_create_local_worktree(&root_link_path, true, cx)
4920 })
4921 .await
4922 .unwrap();
4923
4924 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4925 .await;
4926 cx.read(|cx| {
4927 let tree = tree.read(cx);
4928 assert_eq!(tree.file_count(), 5);
4929 assert_eq!(
4930 tree.inode_for_path("fennel/grape"),
4931 tree.inode_for_path("finnochio/grape")
4932 );
4933 });
4934
4935 let cancel_flag = Default::default();
4936 let results = project
4937 .read_with(cx, |project, cx| {
4938 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4939 })
4940 .await;
4941 assert_eq!(
4942 results
4943 .into_iter()
4944 .map(|result| result.path)
4945 .collect::<Vec<Arc<Path>>>(),
4946 vec![
4947 PathBuf::from("banana/carrot/date").into(),
4948 PathBuf::from("banana/carrot/endive").into(),
4949 ]
4950 );
4951 }
4952
4953 #[gpui::test]
4954 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4955 cx.foreground().forbid_parking();
4956
4957 let mut rust_language = Language::new(
4958 LanguageConfig {
4959 name: "Rust".into(),
4960 path_suffixes: vec!["rs".to_string()],
4961 ..Default::default()
4962 },
4963 Some(tree_sitter_rust::language()),
4964 );
4965 let mut json_language = Language::new(
4966 LanguageConfig {
4967 name: "JSON".into(),
4968 path_suffixes: vec!["json".to_string()],
4969 ..Default::default()
4970 },
4971 None,
4972 );
4973 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4974 name: "the-rust-language-server",
4975 capabilities: lsp::ServerCapabilities {
4976 completion_provider: Some(lsp::CompletionOptions {
4977 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4978 ..Default::default()
4979 }),
4980 ..Default::default()
4981 },
4982 ..Default::default()
4983 });
4984 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4985 name: "the-json-language-server",
4986 capabilities: lsp::ServerCapabilities {
4987 completion_provider: Some(lsp::CompletionOptions {
4988 trigger_characters: Some(vec![":".to_string()]),
4989 ..Default::default()
4990 }),
4991 ..Default::default()
4992 },
4993 ..Default::default()
4994 });
4995
4996 let fs = FakeFs::new(cx.background());
4997 fs.insert_tree(
4998 "/the-root",
4999 json!({
5000 "test.rs": "const A: i32 = 1;",
5001 "test2.rs": "",
5002 "Cargo.toml": "a = 1",
5003 "package.json": "{\"a\": 1}",
5004 }),
5005 )
5006 .await;
5007
5008 let project = Project::test(fs.clone(), cx);
5009 project.update(cx, |project, _| {
5010 project.languages.add(Arc::new(rust_language));
5011 project.languages.add(Arc::new(json_language));
5012 });
5013
5014 let worktree_id = project
5015 .update(cx, |project, cx| {
5016 project.find_or_create_local_worktree("/the-root", true, cx)
5017 })
5018 .await
5019 .unwrap()
5020 .0
5021 .read_with(cx, |tree, _| tree.id());
5022
5023 // Open a buffer without an associated language server.
5024 let toml_buffer = project
5025 .update(cx, |project, cx| {
5026 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5027 })
5028 .await
5029 .unwrap();
5030
5031 // Open a buffer with an associated language server.
5032 let rust_buffer = project
5033 .update(cx, |project, cx| {
5034 project.open_buffer((worktree_id, "test.rs"), cx)
5035 })
5036 .await
5037 .unwrap();
5038
5039 // A server is started up, and it is notified about Rust files.
5040 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5041 assert_eq!(
5042 fake_rust_server
5043 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5044 .await
5045 .text_document,
5046 lsp::TextDocumentItem {
5047 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5048 version: 0,
5049 text: "const A: i32 = 1;".to_string(),
5050 language_id: Default::default()
5051 }
5052 );
5053
5054 // The buffer is configured based on the language server's capabilities.
5055 rust_buffer.read_with(cx, |buffer, _| {
5056 assert_eq!(
5057 buffer.completion_triggers(),
5058 &[".".to_string(), "::".to_string()]
5059 );
5060 });
5061 toml_buffer.read_with(cx, |buffer, _| {
5062 assert!(buffer.completion_triggers().is_empty());
5063 });
5064
5065 // Edit a buffer. The changes are reported to the language server.
5066 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5067 assert_eq!(
5068 fake_rust_server
5069 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5070 .await
5071 .text_document,
5072 lsp::VersionedTextDocumentIdentifier::new(
5073 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5074 1
5075 )
5076 );
5077
5078 // Open a third buffer with a different associated language server.
5079 let json_buffer = project
5080 .update(cx, |project, cx| {
5081 project.open_buffer((worktree_id, "package.json"), cx)
5082 })
5083 .await
5084 .unwrap();
5085
5086 // A json language server is started up and is only notified about the json buffer.
5087 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5088 assert_eq!(
5089 fake_json_server
5090 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5091 .await
5092 .text_document,
5093 lsp::TextDocumentItem {
5094 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5095 version: 0,
5096 text: "{\"a\": 1}".to_string(),
5097 language_id: Default::default()
5098 }
5099 );
5100
5101 // This buffer is configured based on the second language server's
5102 // capabilities.
5103 json_buffer.read_with(cx, |buffer, _| {
5104 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5105 });
5106
5107 // When opening another buffer whose language server is already running,
5108 // it is also configured based on the existing language server's capabilities.
5109 let rust_buffer2 = project
5110 .update(cx, |project, cx| {
5111 project.open_buffer((worktree_id, "test2.rs"), cx)
5112 })
5113 .await
5114 .unwrap();
5115 rust_buffer2.read_with(cx, |buffer, _| {
5116 assert_eq!(
5117 buffer.completion_triggers(),
5118 &[".".to_string(), "::".to_string()]
5119 );
5120 });
5121
5122 // Changes are reported only to servers matching the buffer's language.
5123 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5125 assert_eq!(
5126 fake_rust_server
5127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5128 .await
5129 .text_document,
5130 lsp::VersionedTextDocumentIdentifier::new(
5131 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5132 1
5133 )
5134 );
5135
5136 // Save notifications are reported to all servers.
5137 toml_buffer
5138 .update(cx, |buffer, cx| buffer.save(cx))
5139 .await
5140 .unwrap();
5141 assert_eq!(
5142 fake_rust_server
5143 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5144 .await
5145 .text_document,
5146 lsp::TextDocumentIdentifier::new(
5147 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5148 )
5149 );
5150 assert_eq!(
5151 fake_json_server
5152 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5153 .await
5154 .text_document,
5155 lsp::TextDocumentIdentifier::new(
5156 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5157 )
5158 );
5159
5160 // Renames are reported only to servers matching the buffer's language.
5161 fs.rename(
5162 Path::new("/the-root/test2.rs"),
5163 Path::new("/the-root/test3.rs"),
5164 Default::default(),
5165 )
5166 .await
5167 .unwrap();
5168 assert_eq!(
5169 fake_rust_server
5170 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5171 .await
5172 .text_document,
5173 lsp::TextDocumentIdentifier::new(
5174 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5175 ),
5176 );
5177 assert_eq!(
5178 fake_rust_server
5179 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5180 .await
5181 .text_document,
5182 lsp::TextDocumentItem {
5183 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5184 version: 0,
5185 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5186 language_id: Default::default()
5187 },
5188 );
5189
5190 rust_buffer2.update(cx, |buffer, cx| {
5191 buffer.update_diagnostics(
5192 DiagnosticSet::from_sorted_entries(
5193 vec![DiagnosticEntry {
5194 diagnostic: Default::default(),
5195 range: Anchor::MIN..Anchor::MAX,
5196 }],
5197 &buffer.snapshot(),
5198 ),
5199 cx,
5200 );
5201 assert_eq!(
5202 buffer
5203 .snapshot()
5204 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5205 .count(),
5206 1
5207 );
5208 });
5209
5210 // When the rename changes the extension of the file, the buffer gets closed on the old
5211 // language server and gets opened on the new one.
5212 fs.rename(
5213 Path::new("/the-root/test3.rs"),
5214 Path::new("/the-root/test3.json"),
5215 Default::default(),
5216 )
5217 .await
5218 .unwrap();
5219 assert_eq!(
5220 fake_rust_server
5221 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5222 .await
5223 .text_document,
5224 lsp::TextDocumentIdentifier::new(
5225 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5226 ),
5227 );
5228 assert_eq!(
5229 fake_json_server
5230 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5231 .await
5232 .text_document,
5233 lsp::TextDocumentItem {
5234 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5235 version: 0,
5236 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5237 language_id: Default::default()
5238 },
5239 );
5240 // We clear the diagnostics, since the language has changed.
5241 rust_buffer2.read_with(cx, |buffer, _| {
5242 assert_eq!(
5243 buffer
5244 .snapshot()
5245 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5246 .count(),
5247 0
5248 );
5249 });
5250
5251 // The renamed file's version resets after changing language server.
5252 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5253 assert_eq!(
5254 fake_json_server
5255 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5256 .await
5257 .text_document,
5258 lsp::VersionedTextDocumentIdentifier::new(
5259 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5260 1
5261 )
5262 );
5263
5264 // Restart language servers
5265 project.update(cx, |project, cx| {
5266 project.restart_language_servers_for_buffers(
5267 vec![rust_buffer.clone(), json_buffer.clone()],
5268 cx,
5269 );
5270 });
5271
5272 let mut rust_shutdown_requests = fake_rust_server
5273 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5274 let mut json_shutdown_requests = fake_json_server
5275 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5276 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5277
5278 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5279 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5280
5281 // Ensure rust document is reopened in new rust language server
5282 assert_eq!(
5283 fake_rust_server
5284 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5285 .await
5286 .text_document,
5287 lsp::TextDocumentItem {
5288 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5289 version: 1,
5290 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5291 language_id: Default::default()
5292 }
5293 );
5294
5295 // Ensure json documents are reopened in new json language server
5296 assert_set_eq!(
5297 [
5298 fake_json_server
5299 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5300 .await
5301 .text_document,
5302 fake_json_server
5303 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5304 .await
5305 .text_document,
5306 ],
5307 [
5308 lsp::TextDocumentItem {
5309 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5310 version: 0,
5311 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5312 language_id: Default::default()
5313 },
5314 lsp::TextDocumentItem {
5315 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5316 version: 1,
5317 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5318 language_id: Default::default()
5319 }
5320 ]
5321 );
5322
5323 // Close notifications are reported only to servers matching the buffer's language.
5324 cx.update(|_| drop(json_buffer));
5325 let close_message = lsp::DidCloseTextDocumentParams {
5326 text_document: lsp::TextDocumentIdentifier::new(
5327 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5328 ),
5329 };
5330 assert_eq!(
5331 fake_json_server
5332 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5333 .await,
5334 close_message,
5335 );
5336 }
5337
5338 #[gpui::test]
5339 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5340 cx.foreground().forbid_parking();
5341
5342 let progress_token = "the-progress-token";
5343 let mut language = Language::new(
5344 LanguageConfig {
5345 name: "Rust".into(),
5346 path_suffixes: vec!["rs".to_string()],
5347 ..Default::default()
5348 },
5349 Some(tree_sitter_rust::language()),
5350 );
5351 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5352 disk_based_diagnostics_progress_token: Some(progress_token),
5353 disk_based_diagnostics_sources: &["disk"],
5354 ..Default::default()
5355 });
5356
5357 let fs = FakeFs::new(cx.background());
5358 fs.insert_tree(
5359 "/dir",
5360 json!({
5361 "a.rs": "fn a() { A }",
5362 "b.rs": "const y: i32 = 1",
5363 }),
5364 )
5365 .await;
5366
5367 let project = Project::test(fs, cx);
5368 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5369
5370 let (tree, _) = project
5371 .update(cx, |project, cx| {
5372 project.find_or_create_local_worktree("/dir", true, cx)
5373 })
5374 .await
5375 .unwrap();
5376 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5377
5378 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5379 .await;
5380
5381 // Cause worktree to start the fake language server
5382 let _buffer = project
5383 .update(cx, |project, cx| {
5384 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5385 })
5386 .await
5387 .unwrap();
5388
5389 let mut events = subscribe(&project, cx);
5390
5391 let mut fake_server = fake_servers.next().await.unwrap();
5392 fake_server.start_progress(progress_token).await;
5393 assert_eq!(
5394 events.next().await.unwrap(),
5395 Event::DiskBasedDiagnosticsStarted
5396 );
5397
5398 fake_server.start_progress(progress_token).await;
5399 fake_server.end_progress(progress_token).await;
5400 fake_server.start_progress(progress_token).await;
5401
5402 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5403 lsp::PublishDiagnosticsParams {
5404 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5405 version: None,
5406 diagnostics: vec![lsp::Diagnostic {
5407 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5408 severity: Some(lsp::DiagnosticSeverity::ERROR),
5409 message: "undefined variable 'A'".to_string(),
5410 ..Default::default()
5411 }],
5412 },
5413 );
5414 assert_eq!(
5415 events.next().await.unwrap(),
5416 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5417 );
5418
5419 fake_server.end_progress(progress_token).await;
5420 fake_server.end_progress(progress_token).await;
5421 assert_eq!(
5422 events.next().await.unwrap(),
5423 Event::DiskBasedDiagnosticsUpdated
5424 );
5425 assert_eq!(
5426 events.next().await.unwrap(),
5427 Event::DiskBasedDiagnosticsFinished
5428 );
5429
5430 let buffer = project
5431 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5432 .await
5433 .unwrap();
5434
5435 buffer.read_with(cx, |buffer, _| {
5436 let snapshot = buffer.snapshot();
5437 let diagnostics = snapshot
5438 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5439 .collect::<Vec<_>>();
5440 assert_eq!(
5441 diagnostics,
5442 &[DiagnosticEntry {
5443 range: Point::new(0, 9)..Point::new(0, 10),
5444 diagnostic: Diagnostic {
5445 severity: lsp::DiagnosticSeverity::ERROR,
5446 message: "undefined variable 'A'".to_string(),
5447 group_id: 0,
5448 is_primary: true,
5449 ..Default::default()
5450 }
5451 }]
5452 )
5453 });
5454 }
5455
5456 #[gpui::test]
5457 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5458 cx.foreground().forbid_parking();
5459
5460 let mut language = Language::new(
5461 LanguageConfig {
5462 name: "Rust".into(),
5463 path_suffixes: vec!["rs".to_string()],
5464 ..Default::default()
5465 },
5466 Some(tree_sitter_rust::language()),
5467 );
5468 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5469 disk_based_diagnostics_sources: &["disk"],
5470 ..Default::default()
5471 });
5472
5473 let text = "
5474 fn a() { A }
5475 fn b() { BB }
5476 fn c() { CCC }
5477 "
5478 .unindent();
5479
5480 let fs = FakeFs::new(cx.background());
5481 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5482
5483 let project = Project::test(fs, cx);
5484 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5485
5486 let worktree_id = project
5487 .update(cx, |project, cx| {
5488 project.find_or_create_local_worktree("/dir", true, cx)
5489 })
5490 .await
5491 .unwrap()
5492 .0
5493 .read_with(cx, |tree, _| tree.id());
5494
5495 let buffer = project
5496 .update(cx, |project, cx| {
5497 project.open_buffer((worktree_id, "a.rs"), cx)
5498 })
5499 .await
5500 .unwrap();
5501
5502 let mut fake_server = fake_servers.next().await.unwrap();
5503 let open_notification = fake_server
5504 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5505 .await;
5506
5507 // Edit the buffer, moving the content down
5508 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5509 let change_notification_1 = fake_server
5510 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5511 .await;
5512 assert!(
5513 change_notification_1.text_document.version > open_notification.text_document.version
5514 );
5515
5516 // Report some diagnostics for the initial version of the buffer
5517 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5518 lsp::PublishDiagnosticsParams {
5519 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5520 version: Some(open_notification.text_document.version),
5521 diagnostics: vec![
5522 lsp::Diagnostic {
5523 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5524 severity: Some(DiagnosticSeverity::ERROR),
5525 message: "undefined variable 'A'".to_string(),
5526 source: Some("disk".to_string()),
5527 ..Default::default()
5528 },
5529 lsp::Diagnostic {
5530 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5531 severity: Some(DiagnosticSeverity::ERROR),
5532 message: "undefined variable 'BB'".to_string(),
5533 source: Some("disk".to_string()),
5534 ..Default::default()
5535 },
5536 lsp::Diagnostic {
5537 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5538 severity: Some(DiagnosticSeverity::ERROR),
5539 source: Some("disk".to_string()),
5540 message: "undefined variable 'CCC'".to_string(),
5541 ..Default::default()
5542 },
5543 ],
5544 },
5545 );
5546
5547 // The diagnostics have moved down since they were created.
5548 buffer.next_notification(cx).await;
5549 buffer.read_with(cx, |buffer, _| {
5550 assert_eq!(
5551 buffer
5552 .snapshot()
5553 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5554 .collect::<Vec<_>>(),
5555 &[
5556 DiagnosticEntry {
5557 range: Point::new(3, 9)..Point::new(3, 11),
5558 diagnostic: Diagnostic {
5559 severity: DiagnosticSeverity::ERROR,
5560 message: "undefined variable 'BB'".to_string(),
5561 is_disk_based: true,
5562 group_id: 1,
5563 is_primary: true,
5564 ..Default::default()
5565 },
5566 },
5567 DiagnosticEntry {
5568 range: Point::new(4, 9)..Point::new(4, 12),
5569 diagnostic: Diagnostic {
5570 severity: DiagnosticSeverity::ERROR,
5571 message: "undefined variable 'CCC'".to_string(),
5572 is_disk_based: true,
5573 group_id: 2,
5574 is_primary: true,
5575 ..Default::default()
5576 }
5577 }
5578 ]
5579 );
5580 assert_eq!(
5581 chunks_with_diagnostics(buffer, 0..buffer.len()),
5582 [
5583 ("\n\nfn a() { ".to_string(), None),
5584 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5585 (" }\nfn b() { ".to_string(), None),
5586 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5587 (" }\nfn c() { ".to_string(), None),
5588 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5589 (" }\n".to_string(), None),
5590 ]
5591 );
5592 assert_eq!(
5593 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5594 [
5595 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5596 (" }\nfn c() { ".to_string(), None),
5597 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5598 ]
5599 );
5600 });
5601
5602 // Ensure overlapping diagnostics are highlighted correctly.
5603 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5604 lsp::PublishDiagnosticsParams {
5605 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5606 version: Some(open_notification.text_document.version),
5607 diagnostics: vec![
5608 lsp::Diagnostic {
5609 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5610 severity: Some(DiagnosticSeverity::ERROR),
5611 message: "undefined variable 'A'".to_string(),
5612 source: Some("disk".to_string()),
5613 ..Default::default()
5614 },
5615 lsp::Diagnostic {
5616 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5617 severity: Some(DiagnosticSeverity::WARNING),
5618 message: "unreachable statement".to_string(),
5619 source: Some("disk".to_string()),
5620 ..Default::default()
5621 },
5622 ],
5623 },
5624 );
5625
5626 buffer.next_notification(cx).await;
5627 buffer.read_with(cx, |buffer, _| {
5628 assert_eq!(
5629 buffer
5630 .snapshot()
5631 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5632 .collect::<Vec<_>>(),
5633 &[
5634 DiagnosticEntry {
5635 range: Point::new(2, 9)..Point::new(2, 12),
5636 diagnostic: Diagnostic {
5637 severity: DiagnosticSeverity::WARNING,
5638 message: "unreachable statement".to_string(),
5639 is_disk_based: true,
5640 group_id: 1,
5641 is_primary: true,
5642 ..Default::default()
5643 }
5644 },
5645 DiagnosticEntry {
5646 range: Point::new(2, 9)..Point::new(2, 10),
5647 diagnostic: Diagnostic {
5648 severity: DiagnosticSeverity::ERROR,
5649 message: "undefined variable 'A'".to_string(),
5650 is_disk_based: true,
5651 group_id: 0,
5652 is_primary: true,
5653 ..Default::default()
5654 },
5655 }
5656 ]
5657 );
5658 assert_eq!(
5659 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5660 [
5661 ("fn a() { ".to_string(), None),
5662 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5663 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5664 ("\n".to_string(), None),
5665 ]
5666 );
5667 assert_eq!(
5668 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5669 [
5670 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5671 ("\n".to_string(), None),
5672 ]
5673 );
5674 });
5675
5676 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5677 // changes since the last save.
5678 buffer.update(cx, |buffer, cx| {
5679 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5680 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5681 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5682 });
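        // The buffer edits are forwarded to the language server as a change notification
        // carrying a newer document version.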
5683 let change_notification_2 = fake_server
5684 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5685 .await;
5686 assert!(
5687 change_notification_2.text_document.version
5688 > change_notification_1.text_document.version
5689 );
5690
5691 // Handle out-of-order diagnostics
5692 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5693 lsp::PublishDiagnosticsParams {
5694 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5695 version: Some(change_notification_2.text_document.version),
5696 diagnostics: vec![
5697 lsp::Diagnostic {
5698 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5699 severity: Some(DiagnosticSeverity::ERROR),
5700 message: "undefined variable 'BB'".to_string(),
5701 source: Some("disk".to_string()),
5702 ..Default::default()
5703 },
5704 lsp::Diagnostic {
5705 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5706 severity: Some(DiagnosticSeverity::WARNING),
5707 message: "undefined variable 'A'".to_string(),
5708 source: Some("disk".to_string()),
5709 ..Default::default()
5710 },
5711 ],
5712 },
5713 );
5714
5715 buffer.next_notification(cx).await;
5716 buffer.read_with(cx, |buffer, _| {
5717 assert_eq!(
5718 buffer
5719 .snapshot()
5720 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5721 .collect::<Vec<_>>(),
5722 &[
5723 DiagnosticEntry {
5724 range: Point::new(2, 21)..Point::new(2, 22),
5725 diagnostic: Diagnostic {
5726 severity: DiagnosticSeverity::WARNING,
5727 message: "undefined variable 'A'".to_string(),
5728 is_disk_based: true,
5729 group_id: 1,
5730 is_primary: true,
5731 ..Default::default()
5732 }
5733 },
5734 DiagnosticEntry {
5735 range: Point::new(3, 9)..Point::new(3, 14),
5736 diagnostic: Diagnostic {
5737 severity: DiagnosticSeverity::ERROR,
5738 message: "undefined variable 'BB'".to_string(),
5739 is_disk_based: true,
5740 group_id: 0,
5741 is_primary: true,
5742 ..Default::default()
5743 },
5744 }
5745 ]
5746 );
5747 });
5748 }
5749
5750 #[gpui::test]
5751 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5752 cx.foreground().forbid_parking();
5753
5754 let text = concat!(
5755 "let one = ;\n", //
5756 "let two = \n",
5757 "let three = 3;\n",
5758 );
5759
5760 let fs = FakeFs::new(cx.background());
5761 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5762
5763 let project = Project::test(fs, cx);
5764 let worktree_id = project
5765 .update(cx, |project, cx| {
5766 project.find_or_create_local_worktree("/dir", true, cx)
5767 })
5768 .await
5769 .unwrap()
5770 .0
5771 .read_with(cx, |tree, _| tree.id());
5772
5773 let buffer = project
5774 .update(cx, |project, cx| {
5775 project.open_buffer((worktree_id, "a.rs"), cx)
5776 })
5777 .await
5778 .unwrap();
5779
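        // Manually report two empty diagnostic ranges: one within a line and one at the end of a line.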
5780 project.update(cx, |project, cx| {
5781 project
5782 .update_buffer_diagnostics(
5783 &buffer,
5784 vec![
5785 DiagnosticEntry {
5786 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5787 diagnostic: Diagnostic {
5788 severity: DiagnosticSeverity::ERROR,
5789 message: "syntax error 1".to_string(),
5790 ..Default::default()
5791 },
5792 },
5793 DiagnosticEntry {
5794 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5795 diagnostic: Diagnostic {
5796 severity: DiagnosticSeverity::ERROR,
5797 message: "syntax error 2".to_string(),
5798 ..Default::default()
5799 },
5800 },
5801 ],
5802 None,
5803 cx,
5804 )
5805 .unwrap();
5806 });
5807
5808 // An empty range is extended forward to include the following character.
5809 // At the end of a line, an empty range is extended backward to include
5810 // the preceding character.
5811 buffer.read_with(cx, |buffer, _| {
5812 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5813 assert_eq!(
5814 chunks
5815 .iter()
5816 .map(|(s, d)| (s.as_str(), *d))
5817 .collect::<Vec<_>>(),
5818 &[
5819 ("let one = ", None),
5820 (";", Some(DiagnosticSeverity::ERROR)),
5821 ("\nlet two =", None),
5822 (" ", Some(DiagnosticSeverity::ERROR)),
5823 ("\nlet three = 3;\n", None)
5824 ]
5825 );
5826 });
5827 }
5828
5829 #[gpui::test]
5830 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5831 cx.foreground().forbid_parking();
5832
5833 let mut language = Language::new(
5834 LanguageConfig {
5835 name: "Rust".into(),
5836 path_suffixes: vec!["rs".to_string()],
5837 ..Default::default()
5838 },
5839 Some(tree_sitter_rust::language()),
5840 );
5841 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5842
5843 let text = "
5844 fn a() {
5845 f1();
5846 }
5847 fn b() {
5848 f2();
5849 }
5850 fn c() {
5851 f3();
5852 }
5853 "
5854 .unindent();
5855
5856 let fs = FakeFs::new(cx.background());
5857 fs.insert_tree(
5858 "/dir",
5859 json!({
5860 "a.rs": text.clone(),
5861 }),
5862 )
5863 .await;
5864
5865 let project = Project::test(fs, cx);
5866 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5867
5868 let worktree_id = project
5869 .update(cx, |project, cx| {
5870 project.find_or_create_local_worktree("/dir", true, cx)
5871 })
5872 .await
5873 .unwrap()
5874 .0
5875 .read_with(cx, |tree, _| tree.id());
5876
5877 let buffer = project
5878 .update(cx, |project, cx| {
5879 project.open_buffer((worktree_id, "a.rs"), cx)
5880 })
5881 .await
5882 .unwrap();
5883
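        // Record the document version the language server saw when the buffer was opened.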
5884 let mut fake_server = fake_servers.next().await.unwrap();
5885 let lsp_document_version = fake_server
5886 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5887 .await
5888 .text_document
5889 .version;
5890
5891 // Simulate editing the buffer after the language server computes some edits.
5892 buffer.update(cx, |buffer, cx| {
5893 buffer.edit(
5894 [Point::new(0, 0)..Point::new(0, 0)],
5895 "// above first function\n",
5896 cx,
5897 );
5898 buffer.edit(
5899 [Point::new(2, 0)..Point::new(2, 0)],
5900 " // inside first function\n",
5901 cx,
5902 );
5903 buffer.edit(
5904 [Point::new(6, 4)..Point::new(6, 4)],
5905 "// inside second function ",
5906 cx,
5907 );
5908
5909 assert_eq!(
5910 buffer.text(),
5911 "
5912 // above first function
5913 fn a() {
5914 // inside first function
5915 f1();
5916 }
5917 fn b() {
5918 // inside second function f2();
5919 }
5920 fn c() {
5921 f3();
5922 }
5923 "
5924 .unindent()
5925 );
5926 });
5927
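        // Interpret LSP edits that were computed against the older document version recorded
        // above; they should be translated to apply to the current buffer contents.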
5928 let edits = project
5929 .update(cx, |project, cx| {
5930 project.edits_from_lsp(
5931 &buffer,
5932 vec![
5933 // replace body of first function
5934 lsp::TextEdit {
5935 range: lsp::Range::new(
5936 lsp::Position::new(0, 0),
5937 lsp::Position::new(3, 0),
5938 ),
5939 new_text: "
5940 fn a() {
5941 f10();
5942 }
5943 "
5944 .unindent(),
5945 },
5946 // edit inside second function
5947 lsp::TextEdit {
5948 range: lsp::Range::new(
5949 lsp::Position::new(4, 6),
5950 lsp::Position::new(4, 6),
5951 ),
5952 new_text: "00".into(),
5953 },
5954 // edit inside third function via two distinct edits
5955 lsp::TextEdit {
5956 range: lsp::Range::new(
5957 lsp::Position::new(7, 5),
5958 lsp::Position::new(7, 5),
5959 ),
5960 new_text: "4000".into(),
5961 },
5962 lsp::TextEdit {
5963 range: lsp::Range::new(
5964 lsp::Position::new(7, 5),
5965 lsp::Position::new(7, 6),
5966 ),
5967 new_text: "".into(),
5968 },
5969 ],
5970 Some(lsp_document_version),
5971 cx,
5972 )
5973 })
5974 .await
5975 .unwrap();
5976
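        // Applying the interpreted edits preserves the manual edits made in the meantime.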
5977 buffer.update(cx, |buffer, cx| {
5978 for (range, new_text) in edits {
5979 buffer.edit([range], new_text, cx);
5980 }
5981 assert_eq!(
5982 buffer.text(),
5983 "
5984 // above first function
5985 fn a() {
5986 // inside first function
5987 f10();
5988 }
5989 fn b() {
5990 // inside second function f200();
5991 }
5992 fn c() {
5993 f4000();
5994 }
5995 "
5996 .unindent()
5997 );
5998 });
5999 }
6000
6001 #[gpui::test]
6002 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6003 cx.foreground().forbid_parking();
6004
6005 let text = "
6006 use a::b;
6007 use a::c;
6008
6009 fn f() {
6010 b();
6011 c();
6012 }
6013 "
6014 .unindent();
6015
6016 let fs = FakeFs::new(cx.background());
6017 fs.insert_tree(
6018 "/dir",
6019 json!({
6020 "a.rs": text.clone(),
6021 }),
6022 )
6023 .await;
6024
6025 let project = Project::test(fs, cx);
6026 let worktree_id = project
6027 .update(cx, |project, cx| {
6028 project.find_or_create_local_worktree("/dir", true, cx)
6029 })
6030 .await
6031 .unwrap()
6032 .0
6033 .read_with(cx, |tree, _| tree.id());
6034
6035 let buffer = project
6036 .update(cx, |project, cx| {
6037 project.open_buffer((worktree_id, "a.rs"), cx)
6038 })
6039 .await
6040 .unwrap();
6041
6042 // Simulate the language server sending us a small edit in the form of a very large diff.
        // rust-analyzer does this when performing a merge-imports code action.
6044 let edits = project
6045 .update(cx, |project, cx| {
6046 project.edits_from_lsp(
6047 &buffer,
6048 [
6049 // Replace the first use statement without editing the semicolon.
6050 lsp::TextEdit {
6051 range: lsp::Range::new(
6052 lsp::Position::new(0, 4),
6053 lsp::Position::new(0, 8),
6054 ),
6055 new_text: "a::{b, c}".into(),
6056 },
6057 // Reinsert the remainder of the file between the semicolon and the final
6058 // newline of the file.
6059 lsp::TextEdit {
6060 range: lsp::Range::new(
6061 lsp::Position::new(0, 9),
6062 lsp::Position::new(0, 9),
6063 ),
6064 new_text: "\n\n".into(),
6065 },
6066 lsp::TextEdit {
6067 range: lsp::Range::new(
6068 lsp::Position::new(0, 9),
6069 lsp::Position::new(0, 9),
6070 ),
6071 new_text: "
6072 fn f() {
6073 b();
6074 c();
6075 }"
6076 .unindent(),
6077 },
6078 // Delete everything after the first newline of the file.
6079 lsp::TextEdit {
6080 range: lsp::Range::new(
6081 lsp::Position::new(1, 0),
6082 lsp::Position::new(7, 0),
6083 ),
6084 new_text: "".into(),
6085 },
6086 ],
6087 None,
6088 cx,
6089 )
6090 })
6091 .await
6092 .unwrap();
6093
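        // The verbose diff collapses into two minimal edits: rewriting the first import and
        // deleting the now-redundant second one.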
6094 buffer.update(cx, |buffer, cx| {
6095 let edits = edits
6096 .into_iter()
6097 .map(|(range, text)| {
6098 (
6099 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6100 text,
6101 )
6102 })
6103 .collect::<Vec<_>>();
6104
6105 assert_eq!(
6106 edits,
6107 [
6108 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6109 (Point::new(1, 0)..Point::new(2, 0), "".into())
6110 ]
6111 );
6112
6113 for (range, new_text) in edits {
6114 buffer.edit([range], new_text, cx);
6115 }
6116 assert_eq!(
6117 buffer.text(),
6118 "
6119 use a::{b, c};
6120
6121 fn f() {
6122 b();
6123 c();
6124 }
6125 "
6126 .unindent()
6127 );
6128 });
6129 }
6130
6131 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6132 buffer: &Buffer,
6133 range: Range<T>,
6134 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6135 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6136 for chunk in buffer.snapshot().chunks(range, true) {
6137 if chunks.last().map_or(false, |prev_chunk| {
6138 prev_chunk.1 == chunk.diagnostic_severity
6139 }) {
6140 chunks.last_mut().unwrap().0.push_str(chunk.text);
6141 } else {
6142 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6143 }
6144 }
6145 chunks
6146 }
6147
6148 #[gpui::test]
6149 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6150 let dir = temp_tree(json!({
6151 "root": {
6152 "dir1": {},
6153 "dir2": {
6154 "dir3": {}
6155 }
6156 }
6157 }));
6158
6159 let project = Project::test(Arc::new(RealFs), cx);
6160 let (tree, _) = project
6161 .update(cx, |project, cx| {
6162 project.find_or_create_local_worktree(&dir.path(), true, cx)
6163 })
6164 .await
6165 .unwrap();
6166
6167 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6168 .await;
6169
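        // Fuzzy path search returns no matches, since the worktree contains no files.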
6170 let cancel_flag = Default::default();
6171 let results = project
6172 .read_with(cx, |project, cx| {
6173 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6174 })
6175 .await;
6176
6177 assert!(results.is_empty());
6178 }
6179
6180 #[gpui::test]
6181 async fn test_definition(cx: &mut gpui::TestAppContext) {
6182 let mut language = Language::new(
6183 LanguageConfig {
6184 name: "Rust".into(),
6185 path_suffixes: vec!["rs".to_string()],
6186 ..Default::default()
6187 },
6188 Some(tree_sitter_rust::language()),
6189 );
6190 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6191
6192 let fs = FakeFs::new(cx.background());
6193 fs.insert_tree(
6194 "/dir",
6195 json!({
6196 "a.rs": "const fn a() { A }",
6197 "b.rs": "const y: i32 = crate::a()",
6198 }),
6199 )
6200 .await;
6201
6202 let project = Project::test(fs, cx);
6203 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6204
6205 let (tree, _) = project
6206 .update(cx, |project, cx| {
6207 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6208 })
6209 .await
6210 .unwrap();
6211 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6212 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6213 .await;
6214
6215 let buffer = project
6216 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6217 .await
6218 .unwrap();
6219
6220 let fake_server = fake_servers.next().await.unwrap();
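        // The fake server resolves the definition to a location in `a.rs`, a file outside of
        // the worktree that was opened.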
6221 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6222 let params = params.text_document_position_params;
6223 assert_eq!(
6224 params.text_document.uri.to_file_path().unwrap(),
6225 Path::new("/dir/b.rs"),
6226 );
6227 assert_eq!(params.position, lsp::Position::new(0, 22));
6228
6229 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6230 lsp::Location::new(
6231 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6232 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6233 ),
6234 )))
6235 });
6236
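        // Requesting the definition opens the target file in a new, non-visible worktree.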
6237 let mut definitions = project
6238 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6239 .await
6240 .unwrap();
6241
6242 assert_eq!(definitions.len(), 1);
6243 let definition = definitions.pop().unwrap();
6244 cx.update(|cx| {
6245 let target_buffer = definition.buffer.read(cx);
6246 assert_eq!(
6247 target_buffer
6248 .file()
6249 .unwrap()
6250 .as_local()
6251 .unwrap()
6252 .abs_path(cx),
6253 Path::new("/dir/a.rs"),
6254 );
6255 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6256 assert_eq!(
6257 list_worktrees(&project, cx),
6258 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6259 );
6260
6261 drop(definition);
6262 });
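        // Dropping the definition releases the non-visible worktree that was created for it.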
6263 cx.read(|cx| {
6264 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6265 });
6266
6267 fn list_worktrees<'a>(
6268 project: &'a ModelHandle<Project>,
6269 cx: &'a AppContext,
6270 ) -> Vec<(&'a Path, bool)> {
6271 project
6272 .read(cx)
6273 .worktrees(cx)
6274 .map(|worktree| {
6275 let worktree = worktree.read(cx);
6276 (
6277 worktree.as_local().unwrap().abs_path().as_ref(),
6278 worktree.is_visible(),
6279 )
6280 })
6281 .collect::<Vec<_>>()
6282 }
6283 }
6284
6285 #[gpui::test(iterations = 10)]
6286 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6287 let mut language = Language::new(
6288 LanguageConfig {
6289 name: "TypeScript".into(),
6290 path_suffixes: vec!["ts".to_string()],
6291 ..Default::default()
6292 },
6293 None,
6294 );
6295 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6296
6297 let fs = FakeFs::new(cx.background());
6298 fs.insert_tree(
6299 "/dir",
6300 json!({
6301 "a.ts": "a",
6302 }),
6303 )
6304 .await;
6305
6306 let project = Project::test(fs, cx);
6307 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6308
6309 let (tree, _) = project
6310 .update(cx, |project, cx| {
6311 project.find_or_create_local_worktree("/dir", true, cx)
6312 })
6313 .await
6314 .unwrap();
6315 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6316 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6317 .await;
6318
6319 let buffer = project
6320 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6321 .await
6322 .unwrap();
6323
6324 let fake_server = fake_language_servers.next().await.unwrap();
6325
        // The language server returns code actions that contain commands but no edits.
6327 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6328 fake_server
6329 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6330 Ok(Some(vec![
6331 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6332 title: "The code action".into(),
6333 command: Some(lsp::Command {
6334 title: "The command".into(),
6335 command: "_the/command".into(),
6336 arguments: Some(vec![json!("the-argument")]),
6337 }),
6338 ..Default::default()
6339 }),
6340 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6341 title: "two".into(),
6342 ..Default::default()
6343 }),
6344 ]))
6345 })
6346 .next()
6347 .await;
6348
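        // Apply the first code action, which carries only a command.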
6349 let action = actions.await.unwrap()[0].clone();
6350 let apply = project.update(cx, |project, cx| {
6351 project.apply_code_action(buffer.clone(), action, true, cx)
6352 });
6353
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
6356 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6357 |action, _| async move { Ok(action) },
6358 );
6359
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6362 fake_server
6363 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6364 let fake = fake_server.clone();
6365 move |params, _| {
6366 assert_eq!(params.command, "_the/command");
6367 let fake = fake.clone();
6368 async move {
6369 fake.server
6370 .request::<lsp::request::ApplyWorkspaceEdit>(
6371 lsp::ApplyWorkspaceEditParams {
6372 label: None,
6373 edit: lsp::WorkspaceEdit {
6374 changes: Some(
6375 [(
6376 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6377 vec![lsp::TextEdit {
6378 range: lsp::Range::new(
6379 lsp::Position::new(0, 0),
6380 lsp::Position::new(0, 0),
6381 ),
6382 new_text: "X".into(),
6383 }],
6384 )]
6385 .into_iter()
6386 .collect(),
6387 ),
6388 ..Default::default()
6389 },
6390 },
6391 )
6392 .await
6393 .unwrap();
6394 Ok(Some(json!(null)))
6395 }
6396 }
6397 })
6398 .next()
6399 .await;
6400
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6403 let transaction = apply.await.unwrap();
6404 assert!(transaction.0.contains_key(&buffer));
6405 buffer.update(cx, |buffer, cx| {
6406 assert_eq!(buffer.text(), "Xa");
6407 buffer.undo(cx);
6408 assert_eq!(buffer.text(), "a");
6409 });
6410 }
6411
6412 #[gpui::test]
6413 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6414 let fs = FakeFs::new(cx.background());
6415 fs.insert_tree(
6416 "/dir",
6417 json!({
6418 "file1": "the old contents",
6419 }),
6420 )
6421 .await;
6422
6423 let project = Project::test(fs.clone(), cx);
6424 let worktree_id = project
6425 .update(cx, |p, cx| {
6426 p.find_or_create_local_worktree("/dir", true, cx)
6427 })
6428 .await
6429 .unwrap()
6430 .0
6431 .read_with(cx, |tree, _| tree.id());
6432
6433 let buffer = project
6434 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6435 .await
6436 .unwrap();
6437 buffer
6438 .update(cx, |buffer, cx| {
6439 assert_eq!(buffer.text(), "the old contents");
6440 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6441 buffer.save(cx)
6442 })
6443 .await
6444 .unwrap();
6445
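        // The file on disk now matches the buffer's contents.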
6446 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6447 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6448 }
6449
6450 #[gpui::test]
6451 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6452 let fs = FakeFs::new(cx.background());
6453 fs.insert_tree(
6454 "/dir",
6455 json!({
6456 "file1": "the old contents",
6457 }),
6458 )
6459 .await;
6460
6461 let project = Project::test(fs.clone(), cx);
6462 let worktree_id = project
6463 .update(cx, |p, cx| {
6464 p.find_or_create_local_worktree("/dir/file1", true, cx)
6465 })
6466 .await
6467 .unwrap()
6468 .0
6469 .read_with(cx, |tree, _| tree.id());
6470
6471 let buffer = project
6472 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6473 .await
6474 .unwrap();
6475 buffer
6476 .update(cx, |buffer, cx| {
6477 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6478 buffer.save(cx)
6479 })
6480 .await
6481 .unwrap();
6482
6483 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6484 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6485 }
6486
6487 #[gpui::test]
6488 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6489 let fs = FakeFs::new(cx.background());
6490 fs.insert_tree("/dir", json!({})).await;
6491
6492 let project = Project::test(fs.clone(), cx);
6493 let (worktree, _) = project
6494 .update(cx, |project, cx| {
6495 project.find_or_create_local_worktree("/dir", true, cx)
6496 })
6497 .await
6498 .unwrap();
6499 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6500
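        // Create a buffer with no backing file and make it dirty.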
6501 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6502 buffer.update(cx, |buffer, cx| {
6503 buffer.edit([0..0], "abc", cx);
6504 assert!(buffer.is_dirty());
6505 assert!(!buffer.has_conflict());
6506 });
6507 project
6508 .update(cx, |project, cx| {
6509 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6510 })
6511 .await
6512 .unwrap();
6513 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6514 buffer.read_with(cx, |buffer, cx| {
6515 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6516 assert!(!buffer.is_dirty());
6517 assert!(!buffer.has_conflict());
6518 });
6519
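        // Opening the path the buffer was saved to returns that same buffer.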
6520 let opened_buffer = project
6521 .update(cx, |project, cx| {
6522 project.open_buffer((worktree_id, "file1"), cx)
6523 })
6524 .await
6525 .unwrap();
6526 assert_eq!(opened_buffer, buffer);
6527 }
6528
6529 #[gpui::test(retries = 5)]
6530 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6531 let dir = temp_tree(json!({
6532 "a": {
6533 "file1": "",
6534 "file2": "",
6535 "file3": "",
6536 },
6537 "b": {
6538 "c": {
6539 "file4": "",
6540 "file5": "",
6541 }
6542 }
6543 }));
6544
6545 let project = Project::test(Arc::new(RealFs), cx);
6546 let rpc = project.read_with(cx, |p, _| p.client.clone());
6547
6548 let (tree, _) = project
6549 .update(cx, |p, cx| {
6550 p.find_or_create_local_worktree(dir.path(), true, cx)
6551 })
6552 .await
6553 .unwrap();
6554 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6555
6556 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6557 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6558 async move { buffer.await.unwrap() }
6559 };
6560 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6561 tree.read_with(cx, |tree, _| {
6562 tree.entry_for_path(path)
6563 .expect(&format!("no entry for path {}", path))
6564 .id
6565 })
6566 };
6567
6568 let buffer2 = buffer_for_path("a/file2", cx).await;
6569 let buffer3 = buffer_for_path("a/file3", cx).await;
6570 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6571 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6572
6573 let file2_id = id_for_path("a/file2", &cx);
6574 let file3_id = id_for_path("a/file3", &cx);
6575 let file4_id = id_for_path("b/c/file4", &cx);
6576
6577 // Wait for the initial scan.
6578 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6579 .await;
6580
6581 // Create a remote copy of this worktree.
6582 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6583 let (remote, load_task) = cx.update(|cx| {
6584 Worktree::remote(
6585 1,
6586 1,
6587 initial_snapshot.to_proto(&Default::default(), true),
6588 rpc.clone(),
6589 cx,
6590 )
6591 });
6592 load_task.await;
6593
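        // All of the buffers are clean before the files are mutated on disk.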
6594 cx.read(|cx| {
6595 assert!(!buffer2.read(cx).is_dirty());
6596 assert!(!buffer3.read(cx).is_dirty());
6597 assert!(!buffer4.read(cx).is_dirty());
6598 assert!(!buffer5.read(cx).is_dirty());
6599 });
6600
6601 // Rename and delete files and directories.
6602 tree.flush_fs_events(&cx).await;
6603 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6604 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6605 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6606 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6607 tree.flush_fs_events(&cx).await;
6608
6609 let expected_paths = vec![
6610 "a",
6611 "a/file1",
6612 "a/file2.new",
6613 "b",
6614 "d",
6615 "d/file3",
6616 "d/file4",
6617 ];
6618
6619 cx.read(|app| {
6620 assert_eq!(
6621 tree.read(app)
6622 .paths()
6623 .map(|p| p.to_str().unwrap())
6624 .collect::<Vec<_>>(),
6625 expected_paths
6626 );
6627
6628 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6629 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6630 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6631
6632 assert_eq!(
6633 buffer2.read(app).file().unwrap().path().as_ref(),
6634 Path::new("a/file2.new")
6635 );
6636 assert_eq!(
6637 buffer3.read(app).file().unwrap().path().as_ref(),
6638 Path::new("d/file3")
6639 );
6640 assert_eq!(
6641 buffer4.read(app).file().unwrap().path().as_ref(),
6642 Path::new("d/file4")
6643 );
6644 assert_eq!(
6645 buffer5.read(app).file().unwrap().path().as_ref(),
6646 Path::new("b/c/file5")
6647 );
6648
6649 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6650 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6651 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6652 assert!(buffer5.read(app).file().unwrap().is_deleted());
6653 });
6654
6655 // Update the remote worktree. Check that it becomes consistent with the
6656 // local worktree.
6657 remote.update(cx, |remote, cx| {
6658 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6659 &initial_snapshot,
6660 1,
6661 1,
6662 true,
6663 );
6664 remote
6665 .as_remote_mut()
6666 .unwrap()
6667 .snapshot
6668 .apply_remote_update(update_message)
6669 .unwrap();
6670
6671 assert_eq!(
6672 remote
6673 .paths()
6674 .map(|p| p.to_str().unwrap())
6675 .collect::<Vec<_>>(),
6676 expected_paths
6677 );
6678 });
6679 }
6680
6681 #[gpui::test]
6682 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6683 let fs = FakeFs::new(cx.background());
6684 fs.insert_tree(
6685 "/the-dir",
6686 json!({
6687 "a.txt": "a-contents",
6688 "b.txt": "b-contents",
6689 }),
6690 )
6691 .await;
6692
6693 let project = Project::test(fs.clone(), cx);
6694 let worktree_id = project
6695 .update(cx, |p, cx| {
6696 p.find_or_create_local_worktree("/the-dir", true, cx)
6697 })
6698 .await
6699 .unwrap()
6700 .0
6701 .read_with(cx, |tree, _| tree.id());
6702
6703 // Spawn multiple tasks to open paths, repeating some paths.
6704 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6705 (
6706 p.open_buffer((worktree_id, "a.txt"), cx),
6707 p.open_buffer((worktree_id, "b.txt"), cx),
6708 p.open_buffer((worktree_id, "a.txt"), cx),
6709 )
6710 });
6711
6712 let buffer_a_1 = buffer_a_1.await.unwrap();
6713 let buffer_a_2 = buffer_a_2.await.unwrap();
6714 let buffer_b = buffer_b.await.unwrap();
6715 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6716 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6717
6718 // There is only one buffer per path.
6719 let buffer_a_id = buffer_a_1.id();
6720 assert_eq!(buffer_a_2.id(), buffer_a_id);
6721
6722 // Open the same path again while it is still open.
6723 drop(buffer_a_1);
6724 let buffer_a_3 = project
6725 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6726 .await
6727 .unwrap();
6728
6729 // There's still only one buffer per path.
6730 assert_eq!(buffer_a_3.id(), buffer_a_id);
6731 }
6732
6733 #[gpui::test]
6734 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6735 use std::fs;
6736
6737 let dir = temp_tree(json!({
6738 "file1": "abc",
6739 "file2": "def",
6740 "file3": "ghi",
6741 }));
6742
6743 let project = Project::test(Arc::new(RealFs), cx);
6744 let (worktree, _) = project
6745 .update(cx, |p, cx| {
6746 p.find_or_create_local_worktree(dir.path(), true, cx)
6747 })
6748 .await
6749 .unwrap();
6750 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6751
6752 worktree.flush_fs_events(&cx).await;
6753 worktree
6754 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6755 .await;
6756
6757 let buffer1 = project
6758 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6759 .await
6760 .unwrap();
6761 let events = Rc::new(RefCell::new(Vec::new()));
6762
6763 // initially, the buffer isn't dirty.
6764 buffer1.update(cx, |buffer, cx| {
6765 cx.subscribe(&buffer1, {
6766 let events = events.clone();
6767 move |_, _, event, _| match event {
6768 BufferEvent::Operation(_) => {}
6769 _ => events.borrow_mut().push(event.clone()),
6770 }
6771 })
6772 .detach();
6773
6774 assert!(!buffer.is_dirty());
6775 assert!(events.borrow().is_empty());
6776
6777 buffer.edit(vec![1..2], "", cx);
6778 });
6779
6780 // after the first edit, the buffer is dirty, and emits a dirtied event.
6781 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6783 assert!(buffer.is_dirty());
6784 assert_eq!(
6785 *events.borrow(),
6786 &[language::Event::Edited, language::Event::Dirtied]
6787 );
6788 events.borrow_mut().clear();
6789 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6790 });
6791
6792 // after saving, the buffer is not dirty, and emits a saved event.
6793 buffer1.update(cx, |buffer, cx| {
6794 assert!(!buffer.is_dirty());
6795 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6796 events.borrow_mut().clear();
6797
6798 buffer.edit(vec![1..1], "B", cx);
6799 buffer.edit(vec![2..2], "D", cx);
6800 });
6801
6802 // after editing again, the buffer is dirty, and emits another dirty event.
6803 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6805 assert!(buffer.is_dirty());
6806 assert_eq!(
6807 *events.borrow(),
6808 &[
6809 language::Event::Edited,
6810 language::Event::Dirtied,
6811 language::Event::Edited,
6812 ],
6813 );
6814 events.borrow_mut().clear();
6815
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
6820 assert!(buffer.is_dirty());
6821 });
6822
6823 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6824
6825 // When a file is deleted, the buffer is considered dirty.
6826 let events = Rc::new(RefCell::new(Vec::new()));
6827 let buffer2 = project
6828 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6829 .await
6830 .unwrap();
6831 buffer2.update(cx, |_, cx| {
6832 cx.subscribe(&buffer2, {
6833 let events = events.clone();
6834 move |_, _, event, _| events.borrow_mut().push(event.clone())
6835 })
6836 .detach();
6837 });
6838
6839 fs::remove_file(dir.path().join("file2")).unwrap();
6840 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6841 assert_eq!(
6842 *events.borrow(),
6843 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6844 );
6845
6846 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6847 let events = Rc::new(RefCell::new(Vec::new()));
6848 let buffer3 = project
6849 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6850 .await
6851 .unwrap();
6852 buffer3.update(cx, |_, cx| {
6853 cx.subscribe(&buffer3, {
6854 let events = events.clone();
6855 move |_, _, event, _| events.borrow_mut().push(event.clone())
6856 })
6857 .detach();
6858 });
6859
6860 worktree.flush_fs_events(&cx).await;
6861 buffer3.update(cx, |buffer, cx| {
6862 buffer.edit(Some(0..0), "x", cx);
6863 });
6864 events.borrow_mut().clear();
6865 fs::remove_file(dir.path().join("file3")).unwrap();
6866 buffer3
6867 .condition(&cx, |_, _| !events.borrow().is_empty())
6868 .await;
6869 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6870 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6871 }
6872
6873 #[gpui::test]
6874 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6875 use std::fs;
6876
6877 let initial_contents = "aaa\nbbbbb\nc\n";
6878 let dir = temp_tree(json!({ "the-file": initial_contents }));
6879
6880 let project = Project::test(Arc::new(RealFs), cx);
6881 let (worktree, _) = project
6882 .update(cx, |p, cx| {
6883 p.find_or_create_local_worktree(dir.path(), true, cx)
6884 })
6885 .await
6886 .unwrap();
6887 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6888
6889 worktree
6890 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6891 .await;
6892
6893 let abs_path = dir.path().join("the-file");
6894 let buffer = project
6895 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6896 .await
6897 .unwrap();
6898
6899 // TODO
6900 // Add a cursor on each row.
6901 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6902 // assert!(!buffer.is_dirty());
6903 // buffer.add_selection_set(
6904 // &(0..3)
6905 // .map(|row| Selection {
6906 // id: row as usize,
6907 // start: Point::new(row, 1),
6908 // end: Point::new(row, 1),
6909 // reversed: false,
6910 // goal: SelectionGoal::None,
6911 // })
6912 // .collect::<Vec<_>>(),
6913 // cx,
6914 // )
6915 // });
6916
6917 // Change the file on disk, adding two new lines of text, and removing
6918 // one line.
6919 buffer.read_with(cx, |buffer, _| {
6920 assert!(!buffer.is_dirty());
6921 assert!(!buffer.has_conflict());
6922 });
6923 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6924 fs::write(&abs_path, new_contents).unwrap();
6925
6926 // Because the buffer was not modified, it is reloaded from disk. Its
6927 // contents are edited according to the diff between the old and new
6928 // file contents.
6929 buffer
6930 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6931 .await;
6932
6933 buffer.update(cx, |buffer, _| {
6934 assert_eq!(buffer.text(), new_contents);
6935 assert!(!buffer.is_dirty());
6936 assert!(!buffer.has_conflict());
6937
6938 // TODO
6939 // let cursor_positions = buffer
6940 // .selection_set(selection_set_id)
6941 // .unwrap()
6942 // .selections::<Point>(&*buffer)
6943 // .map(|selection| {
6944 // assert_eq!(selection.start, selection.end);
6945 // selection.start
6946 // })
6947 // .collect::<Vec<_>>();
6948 // assert_eq!(
6949 // cursor_positions,
6950 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6951 // );
6952 });
6953
6954 // Modify the buffer
6955 buffer.update(cx, |buffer, cx| {
6956 buffer.edit(vec![0..0], " ", cx);
6957 assert!(buffer.is_dirty());
6958 assert!(!buffer.has_conflict());
6959 });
6960
6961 // Change the file on disk again, adding blank lines to the beginning.
6962 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6963
6964 // Because the buffer is modified, it doesn't reload from disk, but is
6965 // marked as having a conflict.
6966 buffer
6967 .condition(&cx, |buffer, _| buffer.has_conflict())
6968 .await;
6969 }
6970
6971 #[gpui::test]
6972 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6973 cx.foreground().forbid_parking();
6974
6975 let fs = FakeFs::new(cx.background());
6976 fs.insert_tree(
6977 "/the-dir",
6978 json!({
6979 "a.rs": "
6980 fn foo(mut v: Vec<usize>) {
6981 for x in &v {
6982 v.push(1);
6983 }
6984 }
6985 "
6986 .unindent(),
6987 }),
6988 )
6989 .await;
6990
6991 let project = Project::test(fs.clone(), cx);
6992 let (worktree, _) = project
6993 .update(cx, |p, cx| {
6994 p.find_or_create_local_worktree("/the-dir", true, cx)
6995 })
6996 .await
6997 .unwrap();
6998 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6999
7000 let buffer = project
7001 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7002 .await
7003 .unwrap();
7004
        let buffer_uri = lsp::Url::from_file_path("/the-dir/a.rs").unwrap();
7006 let message = lsp::PublishDiagnosticsParams {
7007 uri: buffer_uri.clone(),
7008 diagnostics: vec![
7009 lsp::Diagnostic {
7010 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7011 severity: Some(DiagnosticSeverity::WARNING),
7012 message: "error 1".to_string(),
7013 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7014 location: lsp::Location {
7015 uri: buffer_uri.clone(),
7016 range: lsp::Range::new(
7017 lsp::Position::new(1, 8),
7018 lsp::Position::new(1, 9),
7019 ),
7020 },
7021 message: "error 1 hint 1".to_string(),
7022 }]),
7023 ..Default::default()
7024 },
7025 lsp::Diagnostic {
7026 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7027 severity: Some(DiagnosticSeverity::HINT),
7028 message: "error 1 hint 1".to_string(),
7029 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7030 location: lsp::Location {
7031 uri: buffer_uri.clone(),
7032 range: lsp::Range::new(
7033 lsp::Position::new(1, 8),
7034 lsp::Position::new(1, 9),
7035 ),
7036 },
7037 message: "original diagnostic".to_string(),
7038 }]),
7039 ..Default::default()
7040 },
7041 lsp::Diagnostic {
7042 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7043 severity: Some(DiagnosticSeverity::ERROR),
7044 message: "error 2".to_string(),
7045 related_information: Some(vec![
7046 lsp::DiagnosticRelatedInformation {
7047 location: lsp::Location {
7048 uri: buffer_uri.clone(),
7049 range: lsp::Range::new(
7050 lsp::Position::new(1, 13),
7051 lsp::Position::new(1, 15),
7052 ),
7053 },
7054 message: "error 2 hint 1".to_string(),
7055 },
7056 lsp::DiagnosticRelatedInformation {
7057 location: lsp::Location {
7058 uri: buffer_uri.clone(),
7059 range: lsp::Range::new(
7060 lsp::Position::new(1, 13),
7061 lsp::Position::new(1, 15),
7062 ),
7063 },
7064 message: "error 2 hint 2".to_string(),
7065 },
7066 ]),
7067 ..Default::default()
7068 },
7069 lsp::Diagnostic {
7070 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7071 severity: Some(DiagnosticSeverity::HINT),
7072 message: "error 2 hint 1".to_string(),
7073 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7074 location: lsp::Location {
7075 uri: buffer_uri.clone(),
7076 range: lsp::Range::new(
7077 lsp::Position::new(2, 8),
7078 lsp::Position::new(2, 17),
7079 ),
7080 },
7081 message: "original diagnostic".to_string(),
7082 }]),
7083 ..Default::default()
7084 },
7085 lsp::Diagnostic {
7086 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7087 severity: Some(DiagnosticSeverity::HINT),
7088 message: "error 2 hint 2".to_string(),
7089 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7090 location: lsp::Location {
7091 uri: buffer_uri.clone(),
7092 range: lsp::Range::new(
7093 lsp::Position::new(2, 8),
7094 lsp::Position::new(2, 17),
7095 ),
7096 },
7097 message: "original diagnostic".to_string(),
7098 }]),
7099 ..Default::default()
7100 },
7101 ],
7102 version: None,
7103 };
7104
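        // Publish diagnostics whose related information links hints back to their primary diagnostics.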
7105 project
7106 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7107 .unwrap();
7108 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7109
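        // Diagnostics are returned in position order, and each hint shares the group id of its
        // primary diagnostic.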
7110 assert_eq!(
7111 buffer
7112 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7113 .collect::<Vec<_>>(),
7114 &[
7115 DiagnosticEntry {
7116 range: Point::new(1, 8)..Point::new(1, 9),
7117 diagnostic: Diagnostic {
7118 severity: DiagnosticSeverity::WARNING,
7119 message: "error 1".to_string(),
7120 group_id: 0,
7121 is_primary: true,
7122 ..Default::default()
7123 }
7124 },
7125 DiagnosticEntry {
7126 range: Point::new(1, 8)..Point::new(1, 9),
7127 diagnostic: Diagnostic {
7128 severity: DiagnosticSeverity::HINT,
7129 message: "error 1 hint 1".to_string(),
7130 group_id: 0,
7131 is_primary: false,
7132 ..Default::default()
7133 }
7134 },
7135 DiagnosticEntry {
7136 range: Point::new(1, 13)..Point::new(1, 15),
7137 diagnostic: Diagnostic {
7138 severity: DiagnosticSeverity::HINT,
7139 message: "error 2 hint 1".to_string(),
7140 group_id: 1,
7141 is_primary: false,
7142 ..Default::default()
7143 }
7144 },
7145 DiagnosticEntry {
7146 range: Point::new(1, 13)..Point::new(1, 15),
7147 diagnostic: Diagnostic {
7148 severity: DiagnosticSeverity::HINT,
7149 message: "error 2 hint 2".to_string(),
7150 group_id: 1,
7151 is_primary: false,
7152 ..Default::default()
7153 }
7154 },
7155 DiagnosticEntry {
7156 range: Point::new(2, 8)..Point::new(2, 17),
7157 diagnostic: Diagnostic {
7158 severity: DiagnosticSeverity::ERROR,
7159 message: "error 2".to_string(),
7160 group_id: 1,
7161 is_primary: true,
7162 ..Default::default()
7163 }
7164 }
7165 ]
7166 );
7167
7168 assert_eq!(
7169 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7170 &[
7171 DiagnosticEntry {
7172 range: Point::new(1, 8)..Point::new(1, 9),
7173 diagnostic: Diagnostic {
7174 severity: DiagnosticSeverity::WARNING,
7175 message: "error 1".to_string(),
7176 group_id: 0,
7177 is_primary: true,
7178 ..Default::default()
7179 }
7180 },
7181 DiagnosticEntry {
7182 range: Point::new(1, 8)..Point::new(1, 9),
7183 diagnostic: Diagnostic {
7184 severity: DiagnosticSeverity::HINT,
7185 message: "error 1 hint 1".to_string(),
7186 group_id: 0,
7187 is_primary: false,
7188 ..Default::default()
7189 }
7190 },
7191 ]
7192 );
7193 assert_eq!(
7194 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7195 &[
7196 DiagnosticEntry {
7197 range: Point::new(1, 13)..Point::new(1, 15),
7198 diagnostic: Diagnostic {
7199 severity: DiagnosticSeverity::HINT,
7200 message: "error 2 hint 1".to_string(),
7201 group_id: 1,
7202 is_primary: false,
7203 ..Default::default()
7204 }
7205 },
7206 DiagnosticEntry {
7207 range: Point::new(1, 13)..Point::new(1, 15),
7208 diagnostic: Diagnostic {
7209 severity: DiagnosticSeverity::HINT,
7210 message: "error 2 hint 2".to_string(),
7211 group_id: 1,
7212 is_primary: false,
7213 ..Default::default()
7214 }
7215 },
7216 DiagnosticEntry {
7217 range: Point::new(2, 8)..Point::new(2, 17),
7218 diagnostic: Diagnostic {
7219 severity: DiagnosticSeverity::ERROR,
7220 message: "error 2".to_string(),
7221 group_id: 1,
7222 is_primary: true,
7223 ..Default::default()
7224 }
7225 }
7226 ]
7227 );
7228 }
7229
7230 #[gpui::test]
7231 async fn test_rename(cx: &mut gpui::TestAppContext) {
7232 cx.foreground().forbid_parking();
7233
7234 let mut language = Language::new(
7235 LanguageConfig {
7236 name: "Rust".into(),
7237 path_suffixes: vec!["rs".to_string()],
7238 ..Default::default()
7239 },
7240 Some(tree_sitter_rust::language()),
7241 );
7242 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7243
7244 let fs = FakeFs::new(cx.background());
7245 fs.insert_tree(
7246 "/dir",
7247 json!({
7248 "one.rs": "const ONE: usize = 1;",
7249 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7250 }),
7251 )
7252 .await;
7253
7254 let project = Project::test(fs.clone(), cx);
7255 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7256
7257 let (tree, _) = project
7258 .update(cx, |project, cx| {
7259 project.find_or_create_local_worktree("/dir", true, cx)
7260 })
7261 .await
7262 .unwrap();
7263 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7264 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7265 .await;
7266
7267 let buffer = project
7268 .update(cx, |project, cx| {
7269 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7270 })
7271 .await
7272 .unwrap();
7273
7274 let fake_server = fake_servers.next().await.unwrap();
7275
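        // Prepare the rename: the fake server reports the range of the symbol at the requested position.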
7276 let response = project.update(cx, |project, cx| {
7277 project.prepare_rename(buffer.clone(), 7, cx)
7278 });
7279 fake_server
7280 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7281 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7282 assert_eq!(params.position, lsp::Position::new(0, 7));
7283 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7284 lsp::Position::new(0, 6),
7285 lsp::Position::new(0, 9),
7286 ))))
7287 })
7288 .next()
7289 .await
7290 .unwrap();
7291 let range = response.await.unwrap().unwrap();
7292 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7293 assert_eq!(range, 6..9);
7294
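        // Perform the rename: the server responds with a workspace edit that touches both files.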
7295 let response = project.update(cx, |project, cx| {
7296 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7297 });
7298 fake_server
7299 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7300 assert_eq!(
7301 params.text_document_position.text_document.uri.as_str(),
7302 "file:///dir/one.rs"
7303 );
7304 assert_eq!(
7305 params.text_document_position.position,
7306 lsp::Position::new(0, 7)
7307 );
7308 assert_eq!(params.new_name, "THREE");
7309 Ok(Some(lsp::WorkspaceEdit {
7310 changes: Some(
7311 [
7312 (
7313 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7314 vec![lsp::TextEdit::new(
7315 lsp::Range::new(
7316 lsp::Position::new(0, 6),
7317 lsp::Position::new(0, 9),
7318 ),
7319 "THREE".to_string(),
7320 )],
7321 ),
7322 (
7323 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7324 vec![
7325 lsp::TextEdit::new(
7326 lsp::Range::new(
7327 lsp::Position::new(0, 24),
7328 lsp::Position::new(0, 27),
7329 ),
7330 "THREE".to_string(),
7331 ),
7332 lsp::TextEdit::new(
7333 lsp::Range::new(
7334 lsp::Position::new(0, 35),
7335 lsp::Position::new(0, 38),
7336 ),
7337 "THREE".to_string(),
7338 ),
7339 ],
7340 ),
7341 ]
7342 .into_iter()
7343 .collect(),
7344 ),
7345 ..Default::default()
7346 }))
7347 })
7348 .next()
7349 .await
7350 .unwrap();
7351 let mut transaction = response.await.unwrap().0;
7352 assert_eq!(transaction.len(), 2);
7353 assert_eq!(
7354 transaction
7355 .remove_entry(&buffer)
7356 .unwrap()
7357 .0
7358 .read_with(cx, |buffer, _| buffer.text()),
7359 "const THREE: usize = 1;"
7360 );
7361 assert_eq!(
7362 transaction
7363 .into_keys()
7364 .next()
7365 .unwrap()
7366 .read_with(cx, |buffer, _| buffer.text()),
7367 "const TWO: usize = one::THREE + one::THREE;"
7368 );
7369 }
7370
7371 #[gpui::test]
7372 async fn test_search(cx: &mut gpui::TestAppContext) {
7373 let fs = FakeFs::new(cx.background());
7374 fs.insert_tree(
7375 "/dir",
7376 json!({
7377 "one.rs": "const ONE: usize = 1;",
7378 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7379 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7380 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7381 }),
7382 )
7383 .await;
7384 let project = Project::test(fs.clone(), cx);
7385 let (tree, _) = project
7386 .update(cx, |project, cx| {
7387 project.find_or_create_local_worktree("/dir", true, cx)
7388 })
7389 .await
7390 .unwrap();
7391 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7392 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7393 .await;
7394
7395 assert_eq!(
7396 search(&project, SearchQuery::text("TWO", false, true), cx)
7397 .await
7398 .unwrap(),
7399 HashMap::from_iter([
7400 ("two.rs".to_string(), vec![6..9]),
7401 ("three.rs".to_string(), vec![37..40])
7402 ])
7403 );
7404
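        // Edit one of the files in memory; subsequent searches reflect the unsaved buffer contents.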
7405 let buffer_4 = project
7406 .update(cx, |project, cx| {
7407 project.open_buffer((worktree_id, "four.rs"), cx)
7408 })
7409 .await
7410 .unwrap();
7411 buffer_4.update(cx, |buffer, cx| {
7412 buffer.edit([20..28, 31..43], "two::TWO", cx);
7413 });
7414
7415 assert_eq!(
7416 search(&project, SearchQuery::text("TWO", false, true), cx)
7417 .await
7418 .unwrap(),
7419 HashMap::from_iter([
7420 ("two.rs".to_string(), vec![6..9]),
7421 ("three.rs".to_string(), vec![37..40]),
7422 ("four.rs".to_string(), vec![25..28, 36..39])
7423 ])
7424 );
7425
7426 async fn search(
7427 project: &ModelHandle<Project>,
7428 query: SearchQuery,
7429 cx: &mut gpui::TestAppContext,
7430 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7431 let results = project
7432 .update(cx, |project, cx| project.search(query, cx))
7433 .await?;
7434
7435 Ok(results
7436 .into_iter()
7437 .map(|(buffer, ranges)| {
7438 buffer.read_with(cx, |buffer, _| {
7439 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7440 let ranges = ranges
7441 .into_iter()
7442 .map(|range| range.to_offset(buffer))
7443 .collect::<Vec<_>>();
7444 (path, ranges)
7445 })
7446 })
7447 .collect())
7448 }
7449 }
7450}